Oct 01 13:44:36 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 01 13:44:36 crc restorecon[4559]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 01 13:44:36 crc restorecon[4559]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc 
restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 13:44:36 crc 
restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 01 13:44:36 crc restorecon[4559]: the identical message "not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16" was logged at this same timestamp for each of the following files under /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/:
    69105f4f.0  GlobalSign.1.pem  0b9bc432.0  Certum_Trusted_Network_CA_2.pem  GTS_Root_R3.pem  32888f65.0  CommScope_Public_Trust_ECC_Root-01.pem  6b03dec0.0  219d9499.0  CommScope_Public_Trust_ECC_Root-02.pem  5acf816d.0  cbf06781.0
    CommScope_Public_Trust_RSA_Root-01.pem  GTS_Root_R4.pem  dc99f41e.0  CommScope_Public_Trust_RSA_Root-02.pem  GlobalSign.3.pem  AAA_Certificate_Services.pem  985c1f52.0  8794b4e3.0  D-TRUST_BR_Root_CA_1_2020.pem  e7c037b4.0  ef954a4e.0  D-TRUST_EV_Root_CA_1_2020.pem  2add47b6.0
    90c5a3c8.0  D-TRUST_Root_Class_3_CA_2_2009.pem  b0f3e76e.0  53a1b57a.0  D-TRUST_Root_Class_3_CA_2_EV_2009.pem  GlobalSign_Root_CA.pem  DigiCert_Assured_ID_Root_CA.pem  5ad8a5d6.0  68dd7389.0  DigiCert_Assured_ID_Root_G2.pem  9d04f354.0  8d6437c3.0  062cdee6.0
    bd43e1dd.0  DigiCert_Assured_ID_Root_G3.pem  7f3d5d1d.0  c491639e.0  GlobalSign_Root_E46.pem  DigiCert_Global_Root_CA.pem  3513523f.0  399e7759.0  feffd413.0  d18e9066.0  DigiCert_Global_Root_G2.pem  607986c7.0  c90bc37d.0
    1b0f7e5c.0  1e08bfd1.0  DigiCert_Global_Root_G3.pem  dd8e9d41.0  ed39abd0.0  a3418fda.0  bc3f2570.0  DigiCert_High_Assurance_EV_Root_CA.pem  244b5494.0  81b9768f.0  GlobalSign.2.pem  4be590e0.0  DigiCert_TLS_ECC_P384_Root_G5.pem
    9846683b.0  252252d2.0  1e8e7201.0  ISRG_Root_X1.pem  DigiCert_TLS_RSA4096_Root_G5.pem  d52c538d.0  c44cc0c0.0  GlobalSign_Root_R46.pem  DigiCert_Trusted_Root_G4.pem  75d1b2ed.0  a2c66da8.0  GTS_Root_R2.pem  ecccd8db.0
    Entrust.net_Certification_Authority__2048_.pem  aee5f10d.0  3e7271e8.0  b0e59380.0  4c3982f2.0  Entrust_Root_Certification_Authority.pem  6b99d060.0  bf64f35b.0  0a775a30.0  002c0b4f.0  cc450945.0  Entrust_Root_Certification_Authority_-_EC1.pem  106f3e4d.0
    b3fb433b.0  GlobalSign.pem  4042bcee.0  Entrust_Root_Certification_Authority_-_G2.pem  02265526.0  455f1b52.0  0d69c7e1.0  9f727ac7.0  Entrust_Root_Certification_Authority_-_G4.pem  5e98733a.0  f0cd152c.0  dc4d6a89.0  6187b673.0
    FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem  ba8887ce.0  068570d1.0  f081611a.0  48a195d8.0  GDCA_TrustAUTH_R5_ROOT.pem  0f6fa695.0  ab59055e.0  b92fd57f.0  GLOBALTRUST_2020.pem  fa5da96b.0  1ec40989.0  7719f463.0
    GTS_Root_R1.pem  1001acf7.0  f013ecaf.0  626dceaf.0  c559d742.0  1d3472b9.0  9479c8c3.0  a81e292b.0  4bfab552.0  Go_Daddy_Class_2_Certification_Authority.pem  Sectigo_Public_Server_Authentication_Root_E46.pem  Go_Daddy_Root_Certificate_Authority_-_G2.pem  e071171e.0
    57bcb2da.0  HARICA_TLS_ECC_Root_CA_2021.pem  ab5346f4.0  5046c355.0  HARICA_TLS_RSA_Root_CA_2021.pem  865fbdf9.0  da0cfd1d.0  85cde254.0  Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem  cbb3f32b.0  SecureSign_RootCA11.pem  Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem  5860aaa6.0
    31188b5e.0  HiPKI_Root_CA_-_G1.pem  c7f1359b.0  5f15c80c.0  Hongkong_Post_Root_CA_3.pem  09789157.0  ISRG_Root_X2.pem  18856ac4.0  1e09d511.0  IdenTrust_Commercial_Root_CA_1.pem  cf701eeb.0  d06393bb.0  IdenTrust_Public_Sector_Root_CA_1.pem
    10531352.0  Izenpe.com.pem  SecureTrust_CA.pem  b0ed035a.0  Microsec_e-Szigno_Root_CA_2009.pem  8160b96c.0  e8651083.0  2c63f966.0  Security_Communication_RootCA2.pem  Microsoft_ECC_Root_Certificate_Authority_2017.pem  8d89cda1.0  01419da9.0  SSL.com_TLS_RSA_Root_CA_2022.pem
    b7a5b843.0  Microsoft_RSA_Root_Certificate_Authority_2017.pem  bf53fb88.0  9591a472.0  3afde786.0  SwissSign_Gold_CA_-_G2.pem  NAVER_Global_Root_Certification_Authority.pem  3fb36b73.0  d39b0a2c.0  a89d74c2.0  cd58d51e.0  b7db1890.0  NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem
    988a38cb.0  60afe812.0  f39fc864.0  5443e9e3.0  OISTE_WISeKey_Global_Root_GB_CA.pem  e73d606e.0  dfc0fe80.0  b66938e9.0  1e1eab7c.0  OISTE_WISeKey_Global_Root_GC_CA.pem  773e07ad.0  3c899c73.0  d59297b8.0
    ddcda989.0  QuoVadis_Root_CA_1_G3.pem  749e9e03.0  52b525c7.0  Security_Communication_RootCA3.pem  QuoVadis_Root_CA_2.pem  d7e8dc79.0  7a819ef2.0  08063a00.0  6b483515.0  QuoVadis_Root_CA_2_G3.pem  064e0aa9.0  1f58a078.0
    6f7454b3.0  7fa05551.0  QuoVadis_Root_CA_3.pem  76faf6c0.0  9339512a.0  f387163d.0  ee37c333.0  QuoVadis_Root_CA_3_G3.pem  e18bfb83.0  e442e424.0  fe8a2cd8.0  23f4c490.0  5cd81ad7.0
    SSL.com_EV_Root_Certification_Authority_ECC.pem  f0c70a8d.0  7892ad52.0  SZAFIR_ROOT_CA2.pem  4f316efb.0  SSL.com_EV_Root_Certification_Authority_RSA_R2.pem  06dc52d5.0  583d0756.0  Sectigo_Public_Server_Authentication_Root_R46.pem  SSL.com_Root_Certification_Authority_ECC.pem  0bf05006.0  88950faa.0  9046744a.0
    3c860d51.0  SSL.com_Root_Certification_Authority_RSA.pem  6fa5da56.0  33ee480d.0  Secure_Global_CA.pem  63a2c897.0  SSL.com_TLS_ECC_Root_CA_2022.pem  bdacca6f.0  ff34af3f.0  dbff3a01.0  Security_Communication_ECC_RootCA1.pem  emSign_Root_CA_-_C1.pem  Starfield_Class_2_Certification_Authority.pem
    406c9bb1.0  Starfield_Root_Certificate_Authority_-_G2.pem  emSign_ECC_Root_CA_-_C3.pem  Starfield_Services_Root_Certificate_Authority_-_G2.pem  SwissSign_Silver_CA_-_G2.pem  99e1b953.0  T-TeleSec_GlobalRoot_Class_2.pem  vTrus_Root_CA.pem  T-TeleSec_GlobalRoot_Class_3.pem  14bc7599.0  TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem  TWCA_Global_Root_CA.pem  7a3adc42.0
    TWCA_Root_Certification_Authority.pem  f459871d.0  Telekom_Security_TLS_ECC_Root_2020.pem  emSign_Root_CA_-_G1.pem  Telekom_Security_TLS_RSA_Root_2023.pem  TeliaSonera_Root_CA_v1.pem  Telia_Root_CA_v2.pem  8f103249.0  f058632f.0  ca-certificates.crt  TrustAsia_Global_Root_CA_G3.pem  9bf03295.0
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 
13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 13:44:36 crc 
restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 
13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:36 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 13:44:37 crc restorecon[4559]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 01 13:44:37 crc kubenswrapper[4605]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 13:44:37 crc kubenswrapper[4605]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 01 13:44:37 crc kubenswrapper[4605]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 13:44:37 crc kubenswrapper[4605]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 01 13:44:37 crc kubenswrapper[4605]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 01 13:44:37 crc kubenswrapper[4605]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.667809 4605 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680443 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680487 4605 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680493 4605 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680500 4605 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680507 4605 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680513 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680529 4605 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680536 4605 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680541 4605 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680546 4605 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680551 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680574 4605 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680581 4605 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680587 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680592 4605 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680597 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680602 4605 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680607 4605 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680611 4605 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680616 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680621 4605 
feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680626 4605 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680631 4605 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680635 4605 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680640 4605 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680645 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680650 4605 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680655 4605 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680660 4605 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680665 4605 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680674 4605 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680680 4605 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680686 4605 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680692 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680697 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680705 4605 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680712 4605 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680718 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680723 4605 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680729 4605 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680734 4605 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680740 4605 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680745 4605 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680750 4605 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680755 4605 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680759 4605 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680764 4605 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680769 4605 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680773 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680778 4605 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680783 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680788 4605 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680792 4605 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680797 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680805 4605 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680810 4605 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680815 4605 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680820 4605 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680826 4605 feature_gate.go:330] unrecognized feature gate: Example Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680832 4605 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680837 4605 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680842 4605 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680848 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680853 4605 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680858 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680864 4605 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680869 4605 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680881 4605 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680893 4605 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680900 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.680905 4605 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681034 4605 flags.go:64] FLAG: --address="0.0.0.0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681048 4605 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681057 4605 flags.go:64] FLAG: --anonymous-auth="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681065 4605 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681073 4605 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681079 4605 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681087 4605 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681119 4605 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681125 4605 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681132 4605 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681138 4605 flags.go:64] FLAG: 
--bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681144 4605 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681151 4605 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681156 4605 flags.go:64] FLAG: --cgroup-root="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681162 4605 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681168 4605 flags.go:64] FLAG: --client-ca-file="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681173 4605 flags.go:64] FLAG: --cloud-config="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681180 4605 flags.go:64] FLAG: --cloud-provider="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681187 4605 flags.go:64] FLAG: --cluster-dns="[]" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681201 4605 flags.go:64] FLAG: --cluster-domain="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681213 4605 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681221 4605 flags.go:64] FLAG: --config-dir="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681228 4605 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681235 4605 flags.go:64] FLAG: --container-log-max-files="5" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681243 4605 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681249 4605 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681255 4605 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681262 4605 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681268 4605 flags.go:64] FLAG: --contention-profiling="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681274 4605 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681280 4605 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681286 4605 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681293 4605 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681301 4605 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681307 4605 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681312 4605 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681320 4605 flags.go:64] FLAG: --enable-load-reader="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681325 4605 flags.go:64] FLAG: --enable-server="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681331 4605 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681339 4605 flags.go:64] FLAG: --event-burst="100" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681345 4605 flags.go:64] FLAG: --event-qps="50" 
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681351 4605 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681357 4605 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681363 4605 flags.go:64] FLAG: --eviction-hard="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681369 4605 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681375 4605 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681381 4605 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681387 4605 flags.go:64] FLAG: --eviction-soft="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681392 4605 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681398 4605 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681403 4605 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681409 4605 flags.go:64] FLAG: --experimental-mounter-path="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681414 4605 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681420 4605 flags.go:64] FLAG: --fail-swap-on="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681426 4605 flags.go:64] FLAG: --feature-gates="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681432 4605 flags.go:64] FLAG: --file-check-frequency="20s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681438 4605 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681444 4605 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681450 4605 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681456 4605 flags.go:64] FLAG: --healthz-port="10248" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681461 4605 flags.go:64] FLAG: --help="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681467 4605 flags.go:64] FLAG: --hostname-override="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681473 4605 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681479 4605 flags.go:64] FLAG: --http-check-frequency="20s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681485 4605 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681491 4605 flags.go:64] FLAG: --image-credential-provider-config="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681496 4605 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681502 4605 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681507 4605 flags.go:64] FLAG: --image-service-endpoint="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681513 4605 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681519 4605 flags.go:64] FLAG: --kube-api-burst="100" Oct 01 13:44:37 crc 
kubenswrapper[4605]: I1001 13:44:37.681525 4605 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681532 4605 flags.go:64] FLAG: --kube-api-qps="50" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681537 4605 flags.go:64] FLAG: --kube-reserved="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681543 4605 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681549 4605 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681554 4605 flags.go:64] FLAG: --kubelet-cgroups="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681560 4605 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681566 4605 flags.go:64] FLAG: --lock-file="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681571 4605 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681577 4605 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681583 4605 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681592 4605 flags.go:64] FLAG: --log-json-split-stream="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681597 4605 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681603 4605 flags.go:64] FLAG: --log-text-split-stream="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681609 4605 flags.go:64] FLAG: --logging-format="text" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681614 4605 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681621 4605 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681626 4605 flags.go:64] FLAG: --manifest-url="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681632 4605 flags.go:64] FLAG: --manifest-url-header="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681640 4605 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681646 4605 flags.go:64] FLAG: --max-open-files="1000000" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681653 4605 flags.go:64] FLAG: --max-pods="110" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681659 4605 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681664 4605 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681670 4605 flags.go:64] FLAG: --memory-manager-policy="None" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681676 4605 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681682 4605 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681687 4605 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681693 4605 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 
13:44:37.681708 4605 flags.go:64] FLAG: --node-status-max-images="50" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681714 4605 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681719 4605 flags.go:64] FLAG: --oom-score-adj="-999" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681731 4605 flags.go:64] FLAG: --pod-cidr="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681736 4605 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681746 4605 flags.go:64] FLAG: --pod-manifest-path="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681751 4605 flags.go:64] FLAG: --pod-max-pids="-1" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681757 4605 flags.go:64] FLAG: --pods-per-core="0" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681763 4605 flags.go:64] FLAG: --port="10250" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681769 4605 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681774 4605 flags.go:64] FLAG: --provider-id="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681780 4605 flags.go:64] FLAG: --qos-reserved="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681786 4605 flags.go:64] FLAG: --read-only-port="10255" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681792 4605 flags.go:64] FLAG: --register-node="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681797 4605 flags.go:64] FLAG: --register-schedulable="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681803 4605 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681814 4605 flags.go:64] FLAG: --registry-burst="10" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681820 4605 flags.go:64] FLAG: --registry-qps="5" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681825 4605 flags.go:64] FLAG: --reserved-cpus="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681831 4605 flags.go:64] FLAG: --reserved-memory="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681838 4605 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681844 4605 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681850 4605 flags.go:64] FLAG: --rotate-certificates="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681856 4605 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681862 4605 flags.go:64] FLAG: --runonce="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681869 4605 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681881 4605 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681895 4605 flags.go:64] FLAG: --seccomp-default="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681903 4605 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681911 4605 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 01 13:44:37 crc 
kubenswrapper[4605]: I1001 13:44:37.681919 4605 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681925 4605 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681931 4605 flags.go:64] FLAG: --storage-driver-password="root" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681937 4605 flags.go:64] FLAG: --storage-driver-secure="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681943 4605 flags.go:64] FLAG: --storage-driver-table="stats" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681950 4605 flags.go:64] FLAG: --storage-driver-user="root" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681956 4605 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681962 4605 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681968 4605 flags.go:64] FLAG: --system-cgroups="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681974 4605 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681984 4605 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681989 4605 flags.go:64] FLAG: --tls-cert-file="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.681995 4605 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682003 4605 flags.go:64] FLAG: --tls-min-version="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682009 4605 flags.go:64] FLAG: --tls-private-key-file="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682015 4605 flags.go:64] FLAG: --topology-manager-policy="none" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682021 4605 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682026 4605 flags.go:64] FLAG: --topology-manager-scope="container" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682032 4605 flags.go:64] FLAG: --v="2" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682040 4605 flags.go:64] FLAG: --version="false" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682048 4605 flags.go:64] FLAG: --vmodule="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682055 4605 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682061 4605 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682244 4605 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682257 4605 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682264 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682270 4605 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682275 4605 feature_gate.go:330] unrecognized feature gate: Example Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682280 4605 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 13:44:37 crc 
kubenswrapper[4605]: W1001 13:44:37.682285 4605 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682290 4605 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682298 4605 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682304 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682311 4605 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682317 4605 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682322 4605 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682327 4605 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682332 4605 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682337 4605 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682343 4605 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682348 4605 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682353 4605 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682358 4605 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682363 4605 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682367 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682372 4605 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682377 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682382 4605 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682387 4605 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682392 4605 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682398 4605 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682404 4605 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682409 4605 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682415 4605 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682420 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682425 4605 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682430 4605 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682435 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682439 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682445 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682449 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682454 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682459 4605 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682464 4605 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682469 4605 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682475 4605 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682481 4605 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682487 4605 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682492 4605 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682497 4605 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682502 4605 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682508 4605 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682513 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682518 4605 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682522 4605 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682528 4605 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682532 4605 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682537 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682542 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682547 4605 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682552 4605 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682557 4605 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682563 4605 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682569 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682575 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682581 4605 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682587 4605 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682592 4605 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682597 4605 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682602 4605 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682608 4605 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682612 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682617 4605 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.682622 4605 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.682751 4605 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.696185 4605 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.696230 4605 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696397 4605 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696418 4605 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696429 4605 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696437 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696447 4605 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696455 4605 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696463 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696472 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696480 4605 feature_gate.go:330] 
unrecognized feature gate: SigstoreImageVerification Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696488 4605 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696496 4605 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696504 4605 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696513 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696521 4605 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696529 4605 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696540 4605 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696551 4605 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696561 4605 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696570 4605 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696580 4605 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696592 4605 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696602 4605 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696610 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696619 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696628 4605 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696637 4605 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696648 4605 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696657 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696665 4605 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696673 4605 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696681 4605 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696689 4605 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696697 4605 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696705 4605 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696713 4605 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696722 4605 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696730 4605 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696738 4605 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696746 4605 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696754 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696763 4605 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696771 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696779 4605 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696787 4605 feature_gate.go:330] unrecognized feature gate: Example Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696795 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696802 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696810 4605 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696819 4605 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696826 4605 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696834 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696842 4605 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696851 4605 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696859 4605 feature_gate.go:330] unrecognized feature gate: 
UpgradeStatus Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696867 4605 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696875 4605 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696883 4605 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696894 4605 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696904 4605 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696914 4605 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696922 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696932 4605 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696940 4605 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696948 4605 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696957 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696965 4605 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696973 4605 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696981 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696990 4605 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.696998 4605 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697006 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697014 4605 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.697025 4605 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697274 4605 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697288 4605 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697297 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 
13:44:37.697305 4605 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697314 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697322 4605 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697330 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697339 4605 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697346 4605 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697356 4605 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697365 4605 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697374 4605 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697383 4605 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697392 4605 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697400 4605 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697410 4605 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697418 4605 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697427 4605 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697434 4605 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697442 4605 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697453 4605 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697463 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697471 4605 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697479 4605 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697487 4605 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697495 4605 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697503 4605 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697511 4605 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697519 4605 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697527 4605 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697535 4605 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697546 4605 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697555 4605 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697566 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697574 4605 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697585 4605 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697595 4605 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697604 4605 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697613 4605 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697621 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697629 4605 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697638 4605 feature_gate.go:330] unrecognized feature gate: Example Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697647 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697656 4605 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697665 4605 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697673 4605 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697681 4605 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697691 4605 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697701 4605 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697710 4605 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697719 4605 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697727 4605 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697735 4605 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697742 4605 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697750 4605 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697758 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697766 4605 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697774 4605 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697781 4605 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697789 4605 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697796 4605 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697804 
4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697812 4605 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697820 4605 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697827 4605 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697836 4605 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697843 4605 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697851 4605 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697860 4605 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697868 4605 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.697875 4605 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.697887 4605 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.698952 4605 server.go:940] "Client rotation is on, will bootstrap in background" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.705893 4605 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.706032 4605 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
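The same block of "unrecognized feature gate" warnings repeats four times above before each "feature gates: {map[...]}" summary, once for each time the merged gate set is applied during startup. All of the unknown names are OpenShift-specific gates that the embedded Kubernetes feature-gate registry does not define, and this build evidently downgrades them to warnings where the stock registry would reject them outright; the GA/Deprecated notices (CloudDualStackNodeIPs, DisableKubeletCloudCredentialProviders, ValidatingAdmissionPolicy, KMSv1) are the registry's normal complaints about explicitly setting gates that no longer need setting. A minimal sketch of that registry behavior, assuming the upstream k8s.io/component-base/featuregate API:

  package main

  import (
  	"fmt"

  	"k8s.io/component-base/featuregate"
  )

  func main() {
  	gate := featuregate.NewFeatureGate()

  	// Register the gates this binary actually knows about.
  	_ = gate.Add(map[featuregate.Feature]featuregate.FeatureSpec{
  		"CloudDualStackNodeIPs": {Default: true, PreRelease: featuregate.GA},
  		"KMSv1":                 {Default: false, PreRelease: featuregate.Deprecated},
  	})

  	// Apply an OpenShift-style gate map. Upstream, SetFromMap rejects
  	// unknown names ("unrecognized feature gate: ..."), while setting a
  	// GA or Deprecated gate merely logs the warnings seen in this log.
  	err := gate.SetFromMap(map[string]bool{
  		"CloudDualStackNodeIPs": true,
  		"KMSv1":                 true,
  		"GCPLabelsTags":         true, // unknown here: error upstream, warning in this build
  	})
  	fmt.Println("set err:", err)
  	fmt.Println("CloudDualStackNodeIPs enabled:", gate.Enabled("CloudDualStackNodeIPs"))
  }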
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.707958 4605 server.go:997] "Starting client certificate rotation"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.708008 4605 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.709628 4605 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-18 15:40:23.852777083 +0000 UTC
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.709751 4605 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1873h55m46.1430311s for next certificate rotation
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.734182 4605 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.741708 4605 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.756636 4605 log.go:25] "Validated CRI v1 runtime API"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.795232 4605 log.go:25] "Validated CRI v1 image API"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.798853 4605 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.805087 4605 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-01-13-39-38-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.805130 4605 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.819123 4605 manager.go:217] Machine: {Timestamp:2025-10-01 13:44:37.81742019 +0000 UTC m=+0.561396438 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:1ac84113-1352-4ad6-8d32-f12829b39b5d BootID:1d17ca42-5162-4e53-b9d0-0c11f7d91daa Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:75:51:c3 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:75:51:c3 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:1a:d7:09 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:34:12:bd Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:74:7b:97 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:b8:84:3c Speed:-1 Mtu:1496} {Name:eth10 MacAddress:56:4c:9f:f5:24:79 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:62:63:8c:a7:77:f6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.819402 4605 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
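The certificate_manager.go lines above record three numbers that hang together: the client certificate expires 2026-02-24, the manager picks a rotation deadline months earlier (2025-12-18), and it then sleeps the difference (1873h55m46s). A minimal sketch of that arithmetic, assuming a jittered deadline drawn between 70% and 90% of the certificate's validity window (the exact jitter policy in client-go's certificate manager may differ); `rotationDeadline` is a hypothetical helper:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a point late in the certificate's lifetime at
// which to rotate: here a uniform draw between 70% and 90% of the
// notBefore..notAfter interval (the fraction is an assumption for
// illustration, not the exact client-go policy).
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(jitter * float64(total)))
}

func main() {
	// A one-year client certificate, matching the expiration logged above.
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)

	deadline := rotationDeadline(notBefore, notAfter)
	fmt.Printf("Certificate expiration is %s, rotation deadline is %s\n", notAfter, deadline)
	fmt.Printf("Waiting %s for next certificate rotation\n", time.Until(deadline).Round(time.Second))
}
```

Sleeping until a jittered deadline well before expiry leaves a wide window to retry rotation against an API server that may be unreachable, as it is during this boot.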
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.819598 4605 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.820032 4605 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.820258 4605 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.820296 4605 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.821164 4605 topology_manager.go:138] "Creating topology manager with none policy"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.821190 4605 container_manager_linux.go:303] "Creating device plugin manager"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.821741 4605 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.821782 4605 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.822448 4605 state_mem.go:36] "Initialized new in-memory state store"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.822566 4605 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.825723 4605 kubelet.go:418] "Attempting to sync node with API server"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.825756 4605 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
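Inside the nodeConfig dump above are the hard eviction thresholds this kubelet enforces: memory.available < 100Mi, nodefs.available < 10%, imagefs.available < 15%, and inode floors at 5%. A minimal sketch of evaluating such a threshold, with `Threshold` and `crossed` as hypothetical simplifications of the kubelet's eviction types (the real logic lives in the kubelet's eviction manager):

```go
package main

import "fmt"

// Threshold mirrors the shape of a HardEvictionThresholds entry above:
// a signal compared against an absolute quantity (bytes) or, when
// Quantity is zero, a percentage of capacity.
type Threshold struct {
	Signal     string
	Quantity   int64   // bytes, e.g. 100Mi for memory.available
	Percentage float64 // e.g. 0.10 for nodefs.available
}

// crossed reports whether observed availability fell below the
// threshold, resolving percentage thresholds against total capacity.
func crossed(t Threshold, available, capacity int64) bool {
	limit := t.Quantity
	if limit == 0 {
		limit = int64(t.Percentage * float64(capacity))
	}
	return available < limit
}

func main() {
	memory := Threshold{Signal: "memory.available", Quantity: 100 << 20}
	nodefs := Threshold{Signal: "nodefs.available", Percentage: 0.10}

	// Hypothetical observations; capacities taken from the Machine and
	// Filesystems lines logged above (25199480832 bytes RAM, /dev/vda4).
	fmt.Println(memory.Signal, "crossed:", crossed(memory, 64<<20, 25199480832)) // true: 64Mi < 100Mi
	fmt.Println(nodefs.Signal, "crossed:", crossed(nodefs, 9<<30, 85292941312))  // false: 9Gi > 10% of ~79Gi
}
```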
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.825776 4605 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.825792 4605 kubelet.go:324] "Adding apiserver pod source"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.825810 4605 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.830403 4605 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.831488 4605 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.834212 4605 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.834555 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.834574 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused
Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.834705 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError"
Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.834723 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835747 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835779 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835790 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835800 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835815 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835825 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835844 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835859 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835870 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835882 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835895 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.835904 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.837012 4605 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.837541 4605 server.go:1280] "Started kubelet"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.838633 4605 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.838625 4605 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.839556 4605 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.839684 4605 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 01 13:44:37 crc systemd[1]: Started Kubernetes Kubelet.
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.844481 4605 server.go:460] "Adding debug handlers to kubelet server"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845178 4605 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845232 4605 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845417 4605 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 20:00:39.375871527 +0000 UTC
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845458 4605 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2478h16m1.530415024s for next certificate rotation
Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.845492 4605 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845540 4605 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845546 4605 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.845620 4605 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.847638 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused
Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.847877 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError"
Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.856142 4605 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="200ms"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857763 4605 factory.go:153] Registering CRI-O factory
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857810 4605 factory.go:221] Registration of the crio container factory successfully
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857915 4605 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857931 4605 factory.go:55] Registering systemd factory
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857944 4605 factory.go:221] Registration of the systemd container factory successfully
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857973 4605 factory.go:103] Registering Raw factory
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.857997 4605 manager.go:1196] Started watching for new ooms in manager
Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.853025 4605 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.200:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a61edaa83a92f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 13:44:37.837498671 +0000 UTC m=+0.581474889,LastTimestamp:2025-10-01 13:44:37.837498671 +0000 UTC m=+0.581474889,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.862318 4605 manager.go:319] Starting recovery of all containers
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.868896 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.868966 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.868990 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869009 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869027 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869045 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869065 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869082 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869161 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869184 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869205 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869226 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869244 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869265 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869305 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869322 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869358 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869376 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869392 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869409 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869431 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869449 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869468 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869487 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869504 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869521 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869542 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869577 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869597 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869615 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869633 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869652 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869670 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869702 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869719 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869741 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869759 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869777 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869795 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869813 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869833 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869851 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869868 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869886 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869905 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869924 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869944 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869963 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869982 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.869999 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870018 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870036 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870061 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870118 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870142 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870161 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870179 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870199 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870218 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870236 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870253 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870271 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870288 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870310 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870329 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870346 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870366 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870384 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870401 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870419 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870436 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870456 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870476 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870495 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870515 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870532 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870549 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870567 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870583 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870602 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870620 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870641 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870661 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870682 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870701 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870719 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870736 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870755 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870772 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870790 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870809 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870827 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870849 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870867 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870886 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870903 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870920 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870939 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870958 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870977 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.870996 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871017 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871036 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871054 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871080 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871126 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871172 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871194 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871216 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871237 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871259 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871279 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871301 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871319 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871364 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871383 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871402 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871421 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871442 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871460 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871480 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871501 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871521 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871539 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871560 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871578 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871599 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871618 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871638 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871656 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871676 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871693 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871711 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871730 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871748 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871765 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871782 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871802 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871822 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871843 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871862 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871880 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871899 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871917 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871933 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871950 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" 
seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871969 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.871986 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872006 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872023 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872041 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872058 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872076 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872130 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872150 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872168 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872191 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 01 
13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872209 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872227 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872246 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872273 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872290 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872310 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872328 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872347 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872368 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872385 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872403 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" 
seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872421 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.872439 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.875992 4605 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876032 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876053 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876072 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876113 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876133 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876152 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876172 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876185 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876196 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876208 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876220 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876231 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876244 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876257 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876268 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876282 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876294 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876307 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876323 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876335 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876346 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876360 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876372 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876386 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876397 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876408 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876419 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876431 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876445 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876456 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876468 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876480 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876493 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876505 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876518 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876529 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876542 4605 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876554 4605 reconstruct.go:97] "Volume reconstruction finished" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.876563 4605 reconciler.go:26] "Reconciler: start to sync state" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.885254 4605 manager.go:324] Recovery completed Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.894847 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.896211 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.896247 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.896257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.897829 4605 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 
13:44:37.897857 4605 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.897885 4605 state_mem.go:36] "Initialized new in-memory state store" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.920386 4605 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.925357 4605 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.925417 4605 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.925453 4605 kubelet.go:2335] "Starting kubelet main sync loop" Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.926111 4605 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.928345 4605 policy_none.go:49] "None policy: Start" Oct 01 13:44:37 crc kubenswrapper[4605]: W1001 13:44:37.928950 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.929078 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.930705 4605 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.930802 4605 state_mem.go:35] "Initializing new in-memory state store" Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.945588 4605 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.983614 4605 manager.go:334] "Starting Device Plugin manager" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.983662 4605 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.983676 4605 server.go:79] "Starting device plugin registration server" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.984194 4605 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.984211 4605 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.984343 4605 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.984431 4605 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 01 13:44:37 crc kubenswrapper[4605]: I1001 13:44:37.984444 4605 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 01 13:44:37 crc kubenswrapper[4605]: E1001 13:44:37.994138 4605 eviction_manager.go:285] "Eviction manager: failed to get summary 
stats" err="failed to get node info: node \"crc\" not found" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.026560 4605 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.026792 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.028199 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.028245 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.028262 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.028456 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.028685 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.028737 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.032147 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.032196 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.032208 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.034996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.035021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.035032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.035174 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.035699 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.035734 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.036818 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.036843 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.036855 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.036960 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.037332 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.037360 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.037696 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.037716 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.037727 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.038528 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.038546 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.038554 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.039208 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.039247 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.039259 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.039397 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.039505 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.039559 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.040501 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.040527 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.040537 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.040678 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.040708 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.040990 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.041042 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.041070 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.041324 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.041353 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.041363 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.056991 4605 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="400ms" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079191 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079265 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079312 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079351 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079547 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079598 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079679 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079714 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079737 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079752 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079768 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079782 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" 
(UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079797 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079810 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.079860 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.085831 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.087811 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.087845 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.087855 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.087899 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.088537 4605 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.180911 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.180974 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181006 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181035 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181065 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181123 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181162 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181196 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181204 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181237 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181236 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181259 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181198 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181328 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181258 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181372 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181276 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181334 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181404 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181387 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181489 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181538 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181570 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181584 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181572 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181604 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181622 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181646 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181686 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.181769 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.289491 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.291206 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.291251 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.291262 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.291291 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.291665 4605 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: 
connect: connection refused" node="crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.366587 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.377815 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.398967 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.418822 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.427012 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.428238 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-ad1ec509aa122b15dae467cbb3f10bef9d820ce07724aea8bb7de1261ab1ee49 WatchSource:0}: Error finding container ad1ec509aa122b15dae467cbb3f10bef9d820ce07724aea8bb7de1261ab1ee49: Status 404 returned error can't find the container with id ad1ec509aa122b15dae467cbb3f10bef9d820ce07724aea8bb7de1261ab1ee49 Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.429030 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-fbad0777230f76db46c225dc7c4add912a617ed3d6e61097ac4bf8bbcaaaf2b6 WatchSource:0}: Error finding container fbad0777230f76db46c225dc7c4add912a617ed3d6e61097ac4bf8bbcaaaf2b6: Status 404 returned error can't find the container with id fbad0777230f76db46c225dc7c4add912a617ed3d6e61097ac4bf8bbcaaaf2b6 Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.445446 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-6327145a2b36df7cedf75f1f6832eead079814d29eb4fd861faa5c3965659e05 WatchSource:0}: Error finding container 6327145a2b36df7cedf75f1f6832eead079814d29eb4fd861faa5c3965659e05: Status 404 returned error can't find the container with id 6327145a2b36df7cedf75f1f6832eead079814d29eb4fd861faa5c3965659e05 Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.446830 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-d34901f4d5247216c660519210d08096319699b93715c90a0a2afe8f404a8d06 WatchSource:0}: Error finding container d34901f4d5247216c660519210d08096319699b93715c90a0a2afe8f404a8d06: Status 404 returned error can't find the container with id d34901f4d5247216c660519210d08096319699b93715c90a0a2afe8f404a8d06 Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.448742 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-24cf2703f2c3b00a00d075f0d1ea1950907d61571a8ef42c4289348443a3e08a WatchSource:0}: Error finding container 
24cf2703f2c3b00a00d075f0d1ea1950907d61571a8ef42c4289348443a3e08a: Status 404 returned error can't find the container with id 24cf2703f2c3b00a00d075f0d1ea1950907d61571a8ef42c4289348443a3e08a Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.458601 4605 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="800ms" Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.662133 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.662274 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.692156 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.693599 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.693651 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.693667 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.693710 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.694211 4605 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.770079 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.770239 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:38 crc kubenswrapper[4605]: W1001 13:44:38.801115 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:38 crc kubenswrapper[4605]: E1001 13:44:38.801226 4605 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.840429 4605 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.936201 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d34901f4d5247216c660519210d08096319699b93715c90a0a2afe8f404a8d06"} Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.937145 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6327145a2b36df7cedf75f1f6832eead079814d29eb4fd861faa5c3965659e05"} Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.937996 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ad1ec509aa122b15dae467cbb3f10bef9d820ce07724aea8bb7de1261ab1ee49"} Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.939421 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fbad0777230f76db46c225dc7c4add912a617ed3d6e61097ac4bf8bbcaaaf2b6"} Oct 01 13:44:38 crc kubenswrapper[4605]: I1001 13:44:38.940379 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"24cf2703f2c3b00a00d075f0d1ea1950907d61571a8ef42c4289348443a3e08a"} Oct 01 13:44:39 crc kubenswrapper[4605]: E1001 13:44:39.259264 4605 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="1.6s" Oct 01 13:44:39 crc kubenswrapper[4605]: W1001 13:44:39.361497 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:39 crc kubenswrapper[4605]: E1001 13:44:39.361606 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.495219 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.497571 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.497842 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.498068 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.498397 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:44:39 crc kubenswrapper[4605]: E1001 13:44:39.499839 4605 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.841056 4605 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.945606 4605 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6c242e74e563565d5bc79fc82e724418275db188b6a103acf2029813c4bf9e1a" exitCode=0 Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.945756 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6c242e74e563565d5bc79fc82e724418275db188b6a103acf2029813c4bf9e1a"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.945802 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.946935 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.946979 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.946994 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.948200 4605 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376" exitCode=0 Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.948232 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.948371 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.950046 4605 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="999514f0684159951a01a13ca129351f74f74bc0bcb46ecdf3d3ed736cff06d9" exitCode=0 Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.950137 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"999514f0684159951a01a13ca129351f74f74bc0bcb46ecdf3d3ed736cff06d9"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.950234 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.950237 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.950352 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.950363 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.952183 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.952214 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.952227 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.952912 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.952949 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.952963 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.955649 4605 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891" exitCode=0 Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.955679 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891"} Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.955732 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.956745 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.956768 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.956777 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.973194 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.974307 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.974329 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:39 crc kubenswrapper[4605]: I1001 13:44:39.974338 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:40 crc kubenswrapper[4605]: W1001 13:44:40.533326 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:40 crc kubenswrapper[4605]: E1001 13:44:40.533456 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.840824 4605 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:40 crc kubenswrapper[4605]: E1001 13:44:40.861111 4605 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.200:6443: connect: connection refused" interval="3.2s" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.960923 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"654d8773e77246882d4e9aefa0d27a3f40f8555aa15250a9a13bca821f716287"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.961035 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.962459 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.962504 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.962518 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.966201 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.966235 4605 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.969437 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.969480 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.969492 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.971567 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.971669 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.971691 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.971703 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.973894 4605 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8899b503c98367e1c6fe0437650168e8d06f7a8bbf141b79b9ba944383a65ee6" exitCode=0 Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.973939 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8899b503c98367e1c6fe0437650168e8d06f7a8bbf141b79b9ba944383a65ee6"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.974057 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.974904 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.974931 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.974947 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.977379 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.977417 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.977430 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e"} Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.977441 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.978131 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.978163 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:40 crc kubenswrapper[4605]: I1001 13:44:40.978180 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:40 crc kubenswrapper[4605]: W1001 13:44:40.984133 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:40 crc kubenswrapper[4605]: E1001 13:44:40.984259 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:41 crc kubenswrapper[4605]: W1001 13:44:41.082258 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.200:6443: connect: connection refused Oct 01 13:44:41 crc kubenswrapper[4605]: E1001 13:44:41.082349 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.200:6443: connect: connection refused" logger="UnhandledError" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.100719 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.102394 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.102446 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.102458 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.102490 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:44:41 crc kubenswrapper[4605]: E1001 13:44:41.103027 4605 kubelet_node_status.go:99] "Unable to 
register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.200:6443: connect: connection refused" node="crc" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.987523 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0"} Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.987590 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.989591 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.989654 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.989674 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.991780 4605 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="156296d89371f013110c31b7c63febb388214a116f628b49fc7002b8ba51c693" exitCode=0 Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.991906 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.991936 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.992691 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.992803 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.992796 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"156296d89371f013110c31b7c63febb388214a116f628b49fc7002b8ba51c693"} Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993073 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993181 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993186 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993320 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993444 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.993474 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.994066 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.994121 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.994134 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.994723 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.994776 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:41 crc kubenswrapper[4605]: I1001 13:44:41.994802 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:42 crc kubenswrapper[4605]: I1001 13:44:42.998544 4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 13:44:42 crc kubenswrapper[4605]: I1001 13:44:42.998613 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:42 crc kubenswrapper[4605]: I1001 13:44:42.999058 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"56f00fb7df74cb29349df1cde98767484265092e35571c38c2aea0399d22d74c"} Oct 01 13:44:42 crc kubenswrapper[4605]: I1001 13:44:42.999130 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f335eb93f9a1321ec3182dc31813805a6db54877546423e33b5889918372886a"} Oct 01 13:44:42 crc kubenswrapper[4605]: I1001 13:44:42.999152 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fb6dc1ff2606cdb8af68a29fe2cbd26e7f180e50a1a0b8bb1086e3fbb0992745"} Oct 01 13:44:42 crc kubenswrapper[4605]: I1001 13:44:42.999201 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:43 crc kubenswrapper[4605]: I1001 13:44:43.000268 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:43 crc kubenswrapper[4605]: I1001 13:44:43.000308 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:43 crc kubenswrapper[4605]: I1001 13:44:43.000324 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:43 crc kubenswrapper[4605]: I1001 13:44:43.000607 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:43 crc kubenswrapper[4605]: I1001 13:44:43.000631 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:43 crc kubenswrapper[4605]: I1001 13:44:43.000644 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.008725 4605 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d2e33e03b5513b58dd21410a72dbb712d569f6526940275f4911cce8fcec8849"} Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.008808 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d8beb2ebe555f9e280e134a1ac53679d7a5449ad1945ee15a73d0320bd2d2104"} Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.009124 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.010773 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.011652 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.011740 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.179217 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.179437 4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.179496 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.181057 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.181347 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.181499 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.304000 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.305832 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.305872 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.305889 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:44 crc kubenswrapper[4605]: I1001 13:44:44.305924 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.010580 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.012807 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.012860 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:45 crc 
kubenswrapper[4605]: I1001 13:44:45.014282 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.014324 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.014338 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.015521 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.015557 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.015572 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:45 crc kubenswrapper[4605]: I1001 13:44:45.980271 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.014803 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.015925 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.015991 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.016009 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.449879 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.450220 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.451903 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.452000 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:46 crc kubenswrapper[4605]: I1001 13:44:46.452021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:47 crc kubenswrapper[4605]: I1001 13:44:47.705308 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:47 crc kubenswrapper[4605]: I1001 13:44:47.706139 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:47 crc kubenswrapper[4605]: I1001 13:44:47.707480 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:47 crc kubenswrapper[4605]: I1001 13:44:47.707522 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:47 crc kubenswrapper[4605]: I1001 13:44:47.707530 4605 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:47 crc kubenswrapper[4605]: E1001 13:44:47.994261 4605 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.971521 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.971777 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.973470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.973520 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.973541 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.976287 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.976441 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.978002 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.978149 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.978187 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:48 crc kubenswrapper[4605]: I1001 13:44:48.986069 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.004368 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.010054 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.024583 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.026172 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.026219 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.026236 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:49 crc kubenswrapper[4605]: I1001 13:44:49.887323 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:50 crc kubenswrapper[4605]: I1001 13:44:50.029641 4605 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:50 crc kubenswrapper[4605]: I1001 13:44:50.031434 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:50 crc kubenswrapper[4605]: I1001 13:44:50.031484 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:50 crc kubenswrapper[4605]: I1001 13:44:50.031500 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:51 crc kubenswrapper[4605]: I1001 13:44:51.034414 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:51 crc kubenswrapper[4605]: I1001 13:44:51.036207 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:51 crc kubenswrapper[4605]: I1001 13:44:51.036254 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:51 crc kubenswrapper[4605]: I1001 13:44:51.036267 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:51 crc kubenswrapper[4605]: W1001 13:44:51.767029 4605 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 13:44:51 crc kubenswrapper[4605]: I1001 13:44:51.767138 4605 trace.go:236] Trace[1192013632]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 13:44:41.765) (total time: 10001ms): Oct 01 13:44:51 crc kubenswrapper[4605]: Trace[1192013632]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:44:51.767) Oct 01 13:44:51 crc kubenswrapper[4605]: Trace[1192013632]: [10.001687459s] [10.001687459s] END Oct 01 13:44:51 crc kubenswrapper[4605]: E1001 13:44:51.767164 4605 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 13:44:51 crc kubenswrapper[4605]: I1001 13:44:51.842508 4605 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 01 13:44:52 crc kubenswrapper[4605]: E1001 13:44:52.130859 4605 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.186a61edaa83a92f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 13:44:37.837498671 +0000 UTC m=+0.581474889,LastTimestamp:2025-10-01 13:44:37.837498671 +0000 UTC 
m=+0.581474889,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 01 13:44:52 crc kubenswrapper[4605]: I1001 13:44:52.315979 4605 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 01 13:44:52 crc kubenswrapper[4605]: I1001 13:44:52.316128 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 01 13:44:52 crc kubenswrapper[4605]: I1001 13:44:52.322007 4605 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 01 13:44:52 crc kubenswrapper[4605]: I1001 13:44:52.322176 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 01 13:44:52 crc kubenswrapper[4605]: I1001 13:44:52.887947 4605 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 13:44:52 crc kubenswrapper[4605]: I1001 13:44:52.888066 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 13:44:54 crc kubenswrapper[4605]: I1001 13:44:54.189632 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:54 crc kubenswrapper[4605]: I1001 13:44:54.189878 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:54 crc kubenswrapper[4605]: I1001 13:44:54.191656 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:54 crc kubenswrapper[4605]: I1001 13:44:54.191705 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:54 crc kubenswrapper[4605]: I1001 13:44:54.191720 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:54 crc kubenswrapper[4605]: I1001 13:44:54.195599 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:44:55 crc kubenswrapper[4605]: I1001 13:44:55.043686 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:55 crc kubenswrapper[4605]: I1001 13:44:55.045013 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:55 crc kubenswrapper[4605]: I1001 13:44:55.045080 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:55 crc kubenswrapper[4605]: I1001 13:44:55.045140 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.025659 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.025923 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.027598 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.027650 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.027664 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.048134 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.048405 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.050234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.050380 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:44:56 crc kubenswrapper[4605]: I1001 13:44:56.050461 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:44:57 crc kubenswrapper[4605]: E1001 13:44:57.324458 4605 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.329807 4605 trace.go:236] Trace[469973564]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 13:44:44.748) (total time: 12581ms): Oct 01 13:44:57 crc kubenswrapper[4605]: Trace[469973564]: ---"Objects listed" error: 12581ms (13:44:57.329) Oct 01 13:44:57 crc kubenswrapper[4605]: Trace[469973564]: [12.581039279s] [12.581039279s] END Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.329829 4605 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.330497 4605 trace.go:236] Trace[1044097459]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 13:44:46.696) (total time: 10634ms): Oct 01 
13:44:57 crc kubenswrapper[4605]: Trace[1044097459]: ---"Objects listed" error: 10634ms (13:44:57.330) Oct 01 13:44:57 crc kubenswrapper[4605]: Trace[1044097459]: [10.634210204s] [10.634210204s] END Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.330528 4605 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.332742 4605 trace.go:236] Trace[1524313989]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 13:44:46.568) (total time: 10763ms): Oct 01 13:44:57 crc kubenswrapper[4605]: Trace[1524313989]: ---"Objects listed" error: 10763ms (13:44:57.332) Oct 01 13:44:57 crc kubenswrapper[4605]: Trace[1524313989]: [10.763827215s] [10.763827215s] END Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.332774 4605 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.334346 4605 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 01 13:44:57 crc kubenswrapper[4605]: E1001 13:44:57.336474 4605 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.362917 4605 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body= Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.363289 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.365449 4605 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body= Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.365553 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.370373 4605 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:42878->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.370460 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:42878->192.168.126.11:17697: read: connection reset by peer" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.371353 4605 patch_prober.go:28] interesting 
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.371353 4605 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.371405 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.746009 4605 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.839188 4605 apiserver.go:52] "Watching apiserver"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.850929 4605 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.853271 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"]
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.853892 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.854220 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.854448 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 01 13:44:57 crc kubenswrapper[4605]: E1001 13:44:57.854419 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.854501 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.854686 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.854823 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:44:57 crc kubenswrapper[4605]: E1001 13:44:57.854993 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 13:44:57 crc kubenswrapper[4605]: E1001 13:44:57.855345 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.857889 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.858287 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.858396 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.858492 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.858504 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.859001 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.859259 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.859272 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.860151 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
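The repeated "Error syncing pod, skipping" entries all reduce to one condition: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d (the network operator that would write it is itself among the pods still waiting to start, so the errors are expected to clear once it comes up). A rough stand-in for that readiness check, using the directory named in the error; the accepted extensions are the ones libcni loads:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory from the NetworkPluginNotReady message above.
	confDir := "/etc/kubernetes/cni/net.d"

	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("NetworkReady=false:", err)
		return
	}
	var configs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni accepts
			configs = append(configs, e.Name())
		}
	}
	if len(configs) == 0 {
		fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
		return
	}
	fmt.Println("NetworkReady=true:", configs)
}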
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.908656 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.928599 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.945481 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.946416 4605 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.960452 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:57 crc kubenswrapper[4605]: I1001 13:44:57.977710 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.004327 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.020746 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037224 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037281 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037310 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037335 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037366 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037394 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037420 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037442 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
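Every failure in the block above is the same event seen from different pods: the kubelet's status manager builds a strategic-merge patch for the pod's .status, but the apiserver must first pass the update through the pod.network-node-identity.openshift.io mutating webhook, whose backend on 127.0.0.1:9743 is not up yet, so each patch is rejected and will be retried. A sketch of the patch shape embedded in the first error (the $setElementOrder/conditions directive pins the merge order of the conditions list; the uid, condition types, and message are copied from the log, the remaining fields trimmed):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"metadata": map[string]any{"uid": "37a5e44f-9a88-4405-be8a-b645485e7312"},
		"status": map[string]any{
			// Strategic-merge directive: the order in which merged
			// "conditions" entries should appear.
			"$setElementOrder/conditions": []map[string]string{
				{"type": "PodReadyToStartContainers"},
				{"type": "Initialized"},
				{"type": "Ready"},
				{"type": "ContainersReady"},
				{"type": "PodScheduled"},
			},
			"conditions": []map[string]any{{
				"type":    "Ready",
				"status":  "False",
				"reason":  "ContainersNotReady",
				"message": "containers with unready status: [network-operator]",
			}},
		},
	}
	b, err := json.MarshalIndent(patch, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}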
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037464 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037491 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037516 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037541 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037566 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037590 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037614 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037639 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037661 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037688 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037710 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037737 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037764 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037791 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037815 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037861 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037883 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037905 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037941 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037969 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.037995 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038020 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038048 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038074 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038137 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038165 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038195 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038223 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038249 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038280 4605 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038302 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038348 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038564 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038589 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038616 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038643 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038672 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038696 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038718 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038740 4605 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038765 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038791 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038777 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038817 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038844 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038874 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038901 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038908 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.038974 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039005 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039031 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039066 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039077 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039131 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039172 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039208 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039235 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039261 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039285 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039295 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039312 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039343 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039370 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039395 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039448 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039474 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039507 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039536 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039568 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039600 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039631 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039660 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039713 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039741 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039767 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039799 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039826 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039853 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039879 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039903 4605 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039927 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040087 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040174 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040199 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040223 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040246 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040269 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040295 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040325 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 13:44:58 crc 
kubenswrapper[4605]: I1001 13:44:58.040350 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040375 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040401 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040429 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040645 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040670 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040694 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040720 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040754 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040788 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc 
kubenswrapper[4605]: I1001 13:44:58.040823 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040856 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040893 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040927 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040959 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040997 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041026 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041052 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041076 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041132 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 
13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041167 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041202 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041235 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041266 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041290 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041314 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041340 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041363 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041386 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041413 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 13:44:58 
crc kubenswrapper[4605]: I1001 13:44:58.041440 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041465 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041492 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041519 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041544 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041573 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041598 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041623 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041651 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041680 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041705 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041732 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041755 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041780 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041807 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041861 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041898 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.041969 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042007 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042045 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042080 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042149 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042184 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042219 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042251 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042282 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042309 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042337 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042365 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042393 4605 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042618 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042645 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042671 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042697 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042721 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042746 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042770 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042798 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042822 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042848 4605 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042879 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042912 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042945 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042977 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043007 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043033 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043060 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043085 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043142 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: 
I1001 13:44:58.043170 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043196 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043223 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043250 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043276 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043308 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043334 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043359 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043386 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043414 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043439 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043465 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043490 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043514 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043540 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043565 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043591 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043619 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043648 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043674 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043701 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043726 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043752 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043811 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043860 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043894 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043922 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043952 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043983 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044013 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044042 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044070 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044123 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044168 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044203 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044230 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044258 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044324 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044355 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044377 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044398 4605 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.051707 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053605 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059193 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059818 4605 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.072747 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.083675 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.087868 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.088825 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.089443 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103801 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103802 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039468 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039741 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039767 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.039918 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040041 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040362 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040372 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040843 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.040836 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042393 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.042834 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043222 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043230 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043493 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043794 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.043962 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044236 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.044650 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.045022 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.045549 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.045739 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.045928 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.046130 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.046348 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.046512 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.046681 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.046847 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.047879 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.048651 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.051419 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.051579 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.051829 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.052038 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.052137 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.052988 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053160 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053317 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053381 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053544 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053572 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053584 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053926 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.053951 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054147 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054272 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054319 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054442 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054514 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054628 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.054748 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.055584 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.055751 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.056120 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058141 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058316 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058363 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058478 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058542 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058629 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058840 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.058933 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059152 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059222 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059391 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059443 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059654 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.059862 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.060037 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.109149 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.109164 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.109176 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.109283 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.109755 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.109817 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.109946 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.110025 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.110180 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.110519 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.110789 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.110855 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.111023 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.111066 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.111240 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.060125 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.060706 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.060795 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.060988 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.061051 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.061253 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.061529 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.062062 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.062444 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063125 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063316 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063358 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063499 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063600 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063720 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063834 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.063959 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064161 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064222 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064229 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064430 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064466 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064126 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.064599 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.066765 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.066997 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.067425 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.067624 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.067851 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.070450 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.070645 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.073190 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.073417 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.073624 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.073833 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.074030 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.074507 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.075025 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076021 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076198 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076291 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076355 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076555 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076682 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076961 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.076962 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.077239 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.077254 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.077503 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.077818 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.080028 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.080367 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.080562 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.081038 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.082041 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.082596 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.083107 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.084141 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.084325 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.084742 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.084940 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.085141 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.085261 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.085523 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.088150 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.088629 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.091360 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.091534 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.091682 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.091918 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.092341 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.094723 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.094947 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.095186 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.095384 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.095688 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.101146 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.101438 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.101570 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.101821 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103068 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103327 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103378 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103660 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.103730 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.104323 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.105628 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.106211 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.106375 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.106656 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.107129 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.107145 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.107404 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.107573 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.107766 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.108061 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.108250 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.060554 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:44:58.560524841 +0000 UTC m=+21.304501049 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.111903 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:58.611850815 +0000 UTC m=+21.355827023 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.111921 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:58.611912907 +0000 UTC m=+21.355889115 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.112014 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:58.612009089 +0000 UTC m=+21.355985297 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.112209 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.112224 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.112255 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:58.612247576 +0000 UTC m=+21.356223784 (durationBeforeRetry 500ms). 
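The nestedpendingoperations.go:348 entries above show the volume manager's per-operation retry backoff: a failed MountVolume.SetUp or UnmountVolume.TearDown is parked ("No retries permitted until ...") for durationBeforeRetry, which starts at 500ms here and grows on consecutive failures. Note the arithmetic is visible in the log itself: the failure at 13:44:58.111921 is blocked until 13:44:58.611912907, exactly 500ms later. Below is a minimal Go sketch of that pattern, assuming a doubling policy and a 2-minute cap; the names are illustrative, and this is not kubelet's actual nestedpendingoperations code.

    package main

    import (
        "fmt"
        "time"
    )

    const (
        initialDelay = 500 * time.Millisecond // matches durationBeforeRetry 500ms above
        maxDelay     = 2 * time.Minute        // assumed cap, for illustration only
    )

    // backoff tracks retry state for a single volume operation.
    type backoff struct {
        delay time.Duration
    }

    // recordFailure notes a failed attempt at time now and returns the earliest
    // time the operation may be retried ("No retries permitted until ...").
    func (b *backoff) recordFailure(now time.Time) time.Time {
        if b.delay == 0 {
            b.delay = initialDelay
        } else {
            b.delay *= 2
            if b.delay > maxDelay {
                b.delay = maxDelay
            }
        }
        return now.Add(b.delay)
    }

    func main() {
        var b backoff
        now := time.Now()
        for attempt := 1; attempt <= 4; attempt++ {
            retryAt := b.recordFailure(now)
            fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
                attempt, retryAt.Format(time.RFC3339Nano), b.delay)
            now = retryAt // pretend the retry happens at the earliest allowed time
        }
    }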
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.112367 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.113240 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.113398 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.115271 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.116665 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.117294 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.117502 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.119521 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.119701 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.119914 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.120166 4605 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0" exitCode=255
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.120215 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0"}
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.120385 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.120462 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.123290 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.126360 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.135369 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.146656 4605 scope.go:117] "RemoveContainer" containerID="83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147605 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147639 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147710 4605 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147720 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147729 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147739 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147748 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147756 4605 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147764 4605 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147773 4605 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147782 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 
13:44:58.147791 4605 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147799 4605 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147808 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147817 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147826 4605 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147834 4605 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147842 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147851 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147858 4605 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147867 4605 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147874 4605 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147883 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147891 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147899 4605 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147907 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147916 4605 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147925 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147934 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147942 4605 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147950 4605 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147960 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147968 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147981 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147991 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.147999 4605 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148007 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148016 4605 reconciler_common.go:293] "Volume 
detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148024 4605 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148032 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148040 4605 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148049 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148057 4605 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148065 4605 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148073 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148082 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148108 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148117 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148125 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148134 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148143 4605 reconciler_common.go:293] "Volume detached for 
volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148152 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148162 4605 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148171 4605 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148179 4605 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148187 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148196 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148204 4605 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148212 4605 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148220 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148228 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148237 4605 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148245 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148254 4605 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148264 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148272 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148281 4605 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148290 4605 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148298 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148307 4605 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148315 4605 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148323 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148332 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148341 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148349 4605 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148357 4605 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148365 4605 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148373 4605 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148383 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148391 4605 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148399 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148407 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148415 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148424 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148433 4605 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148443 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148452 4605 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148459 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148467 4605 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148476 4605 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148484 4605 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148524 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148536 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148546 4605 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148555 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148567 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148576 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148585 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148594 4605 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148603 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148611 4605 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148621 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148630 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148638 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148646 4605 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148655 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148663 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148671 4605 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148679 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148687 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148696 4605 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148705 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148713 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148721 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148730 4605 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148738 4605 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148746 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148754 4605 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148762 4605 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148770 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148778 4605 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148785 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148794 4605 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148802 4605 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148810 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148820 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148828 4605 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148836 4605 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148844 4605 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148853 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148862 4605 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148871 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148879 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148888 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148896 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148904 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148912 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148920 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148929 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148937 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148946 4605 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148956 4605 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148964 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148972 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148981 4605 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148989 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.148997 4605 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149005 4605 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149014 4605 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149022 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149030 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149038 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149045 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149053 4605 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149062 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149077 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149085 4605 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149108 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149118 4605 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149127 4605 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149135 4605 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149143 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149153 4605 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149161 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149169 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149177 4605 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149185 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149193 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149202 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149210 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149218 4605 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149467 4605 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149513 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149580 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149620 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149635 4605 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149645 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149714 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149738 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149780 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 
crc kubenswrapper[4605]: I1001 13:44:58.149827 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149836 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149862 4605 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149871 4605 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149882 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149890 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149899 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149909 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149937 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149947 4605 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149956 4605 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149964 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.149973 4605 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 01 
13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.168760 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.174941 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.176979 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.177657 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.184589 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.189122 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 13:44:58 crc kubenswrapper[4605]: W1001 13:44:58.195488 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-57017f6d1648cf19c2e1c7c25732046ab1e2cfe382fdef024759a216b006da3a WatchSource:0}: Error finding container 57017f6d1648cf19c2e1c7c25732046ab1e2cfe382fdef024759a216b006da3a: Status 404 returned error can't find the container with id 57017f6d1648cf19c2e1c7c25732046ab1e2cfe382fdef024759a216b006da3a Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.200265 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.201200 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.216900 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01
T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: W1001 13:44:58.228497 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-9f2389463c93019f5f4753bd1e3aab18015a425f823a40c6dddcdf9fe0a0a599 WatchSource:0}: Error finding container 9f2389463c93019f5f4753bd1e3aab18015a425f823a40c6dddcdf9fe0a0a599: Status 404 returned error can't find the container with id 9f2389463c93019f5f4753bd1e3aab18015a425f823a40c6dddcdf9fe0a0a599 Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.230211 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.241871 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.250756    4605 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.250786    4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.253039    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.264673    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.285617    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.655878    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.655971    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.656009    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.656035    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:44:58 crc kubenswrapper[4605]: I1001 13:44:58.656065    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656120    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:44:59.656065108 +0000 UTC m=+22.400041316 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656256    4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656254    4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656365    4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656409    4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656430    4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656267    4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656463    4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656477    4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656343    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:59.656323485 +0000 UTC m=+22.400299713 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656520    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:59.6564974 +0000 UTC m=+22.400473608 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656539    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:59.656530831 +0000 UTC m=+22.400507039 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 13:44:58 crc kubenswrapper[4605]: E1001 13:44:58.656558    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:44:59.656550941 +0000 UTC m=+22.400527149 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.123565    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9f2389463c93019f5f4753bd1e3aab18015a425f823a40c6dddcdf9fe0a0a599"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.125702    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.125775    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.125796    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8840c2d1156836d32dcf8116b59ceab4584f14e030ad585787416d4f0ceb4654"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.127905    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.127970    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"57017f6d1648cf19c2e1c7c25732046ab1e2cfe382fdef024759a216b006da3a"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.129648    4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.131438    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240"}
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.131742    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.151416    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.158544    4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-6zb6l"]
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.159047    4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.160999    4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.162869    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.163021    4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.184736    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.210511    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.228230    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.251697    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.259675    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jt4h\" (UniqueName: \"kubernetes.io/projected/83630902-b99b-4944-81a4-487e9584e0c0-kube-api-access-2jt4h\") pod \"node-resolver-6zb6l\" (UID: \"83630902-b99b-4944-81a4-487e9584e0c0\") " pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.259750    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/83630902-b99b-4944-81a4-487e9584e0c0-hosts-file\") pod \"node-resolver-6zb6l\" (UID: \"83630902-b99b-4944-81a4-487e9584e0c0\") " pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.309539    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872       1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895       1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989       1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154       1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577       1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.341232    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.356012    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.360889    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/83630902-b99b-4944-81a4-487e9584e0c0-hosts-file\") pod \"node-resolver-6zb6l\" (UID: \"83630902-b99b-4944-81a4-487e9584e0c0\") " pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.360941    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jt4h\" (UniqueName: \"kubernetes.io/projected/83630902-b99b-4944-81a4-487e9584e0c0-kube-api-access-2jt4h\") pod \"node-resolver-6zb6l\" (UID: \"83630902-b99b-4944-81a4-487e9584e0c0\") " pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.361237    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/83630902-b99b-4944-81a4-487e9584e0c0-hosts-file\") pod \"node-resolver-6zb6l\" (UID: \"83630902-b99b-4944-81a4-487e9584e0c0\") " pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.381361    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872       1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895       1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920       1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989       1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154       1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577       1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.390202    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jt4h\" (UniqueName: \"kubernetes.io/projected/83630902-b99b-4944-81a4-487e9584e0c0-kube-api-access-2jt4h\") pod \"node-resolver-6zb6l\" (UID: \"83630902-b99b-4944-81a4-487e9584e0c0\") " pod="openshift-dns/node-resolver-6zb6l"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.409035    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.430483    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.443760    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.468805    4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.479136 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-6zb6l" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.488725 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.518321 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: W1001 13:44:59.557125 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83630902_b99b_4944_81a4_487e9584e0c0.slice/crio-4dfa7f59c0391ae780dbcbc2b3bb8a89b8fd63c7e383ce832eb000bfcfcfb418 WatchSource:0}: Error finding container 4dfa7f59c0391ae780dbcbc2b3bb8a89b8fd63c7e383ce832eb000bfcfcfb418: Status 404 returned error can't find the container with id 4dfa7f59c0391ae780dbcbc2b3bb8a89b8fd63c7e383ce832eb000bfcfcfb418 Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.596376 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-xclfn"] Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.596938 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-zdjh7"] Oct 
01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.597117 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-wgx5p"] Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.603459 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.603491 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.603497 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.609954 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610175 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610385 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610436 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610391 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610505 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610779 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610905 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.610936 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.611004 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.611493 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.611517 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.623493 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.638687 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.651486 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663218 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663328 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-os-release\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663353 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-k8s-cni-cncf-io\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663372 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-netns\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663389 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-socket-dir-parent\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663405 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-etc-kubernetes\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663434 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f3023060-c8ae-492b-b1cb-a418d9a8e59f-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663488 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-cnibin\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663566 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663644 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f3023060-c8ae-492b-b1cb-a418d9a8e59f-proxy-tls\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663698 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-system-cni-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663719 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-kubelet\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.663747 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:45:01.663720153 +0000 UTC m=+24.407696351 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663779 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-daemon-config\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663802 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f3023060-c8ae-492b-b1cb-a418d9a8e59f-rootfs\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663822 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cnibin\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663840 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663870 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663888 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663910 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-system-cni-dir\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663931 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-cni-bin\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663948 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-hostroot\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663966 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-multus-certs\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.663983 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-os-release\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664004 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664023 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcms8\" (UniqueName: \"kubernetes.io/projected/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-kube-api-access-tcms8\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664041 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-cni-multus\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664061 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cni-binary-copy\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664080 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-cni-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " 
pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664110 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-conf-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664128 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gtbc\" (UniqueName: \"kubernetes.io/projected/1c2ca71f-4cb0-4852-927d-af69be5d77f2-kube-api-access-7gtbc\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664151 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664174 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cqjw\" (UniqueName: \"kubernetes.io/projected/f3023060-c8ae-492b-b1cb-a418d9a8e59f-kube-api-access-4cqjw\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664260 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664282 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664300 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664311 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664349 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:01.66434118 +0000 UTC m=+24.408317388 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664372 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664386 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.664392 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1c2ca71f-4cb0-4852-927d-af69be5d77f2-cni-binary-copy\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664397 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664402 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664463 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:01.664443303 +0000 UTC m=+24.408419511 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664503 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:01.664483654 +0000 UTC m=+24.408459862 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.664532 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:01.664525325 +0000 UTC m=+24.408501533 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.665214 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.677564 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.708456 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.721440 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.743812 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.758040 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.765699 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-cni-multus\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.765781 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-cni-multus\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.765835 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cni-binary-copy\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.765853 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-cni-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.766099 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-conf-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.766151 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-cni-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.766925 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cni-binary-copy\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.766966 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-conf-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.766999 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gtbc\" (UniqueName: \"kubernetes.io/projected/1c2ca71f-4cb0-4852-927d-af69be5d77f2-kube-api-access-7gtbc\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767032 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cqjw\" (UniqueName: \"kubernetes.io/projected/f3023060-c8ae-492b-b1cb-a418d9a8e59f-kube-api-access-4cqjw\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767347 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1c2ca71f-4cb0-4852-927d-af69be5d77f2-cni-binary-copy\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767727 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-os-release\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767835 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1c2ca71f-4cb0-4852-927d-af69be5d77f2-cni-binary-copy\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767369 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-os-release\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767900 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-k8s-cni-cncf-io\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767917 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-socket-dir-parent\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767932 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-netns\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.767972 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-k8s-cni-cncf-io\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768001 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-etc-kubernetes\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768023 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f3023060-c8ae-492b-b1cb-a418d9a8e59f-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768020 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-socket-dir-parent\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " 
pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768042 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-netns\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768069 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-etc-kubernetes\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768128 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-cnibin\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768148 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f3023060-c8ae-492b-b1cb-a418d9a8e59f-proxy-tls\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768162 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-cnibin\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768163 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-system-cni-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768254 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-kubelet\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768322 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-system-cni-dir\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768350 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-kubelet\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768271 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-daemon-config\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768701 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768728 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f3023060-c8ae-492b-b1cb-a418d9a8e59f-rootfs\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768751 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cnibin\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768769 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768802 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-system-cni-dir\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768821 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-cni-bin\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768834 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-hostroot\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768853 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-multus-certs\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768869 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-os-release\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768891 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcms8\" (UniqueName: \"kubernetes.io/projected/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-kube-api-access-tcms8\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769071 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1c2ca71f-4cb0-4852-927d-af69be5d77f2-multus-daemon-config\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.768701 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f3023060-c8ae-492b-b1cb-a418d9a8e59f-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769145 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cnibin\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769161 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f3023060-c8ae-492b-b1cb-a418d9a8e59f-rootfs\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769213 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-hostroot\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769253 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-system-cni-dir\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769294 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-var-lib-cni-bin\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769333 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/1c2ca71f-4cb0-4852-927d-af69be5d77f2-host-run-multus-certs\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769388 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-os-release\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769436 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-tuning-conf-dir\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.769558 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.772232 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.773013 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f3023060-c8ae-492b-b1cb-a418d9a8e59f-proxy-tls\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.785659 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcms8\" (UniqueName: \"kubernetes.io/projected/b9fc4aae-03cb-458d-83cb-1a3ab9fa9639-kube-api-access-tcms8\") pod \"multus-additional-cni-plugins-xclfn\" (UID: \"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\") " pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.786682 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gtbc\" (UniqueName: \"kubernetes.io/projected/1c2ca71f-4cb0-4852-927d-af69be5d77f2-kube-api-access-7gtbc\") pod \"multus-wgx5p\" (UID: \"1c2ca71f-4cb0-4852-927d-af69be5d77f2\") " pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.790651 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cqjw\" (UniqueName: \"kubernetes.io/projected/f3023060-c8ae-492b-b1cb-a418d9a8e59f-kube-api-access-4cqjw\") pod \"machine-config-daemon-zdjh7\" (UID: \"f3023060-c8ae-492b-b1cb-a418d9a8e59f\") " pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.793918 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.806307 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.817176 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.831049 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.843867 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.863005 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.878499 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.892886 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.893296 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.896899 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.913830 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.914892 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.918606 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.925893 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-xclfn" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.925940 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.926004 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.926047 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.925897 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.926166 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:44:59 crc kubenswrapper[4605]: E1001 13:44:59.926271 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:44:59 crc kubenswrapper[4605]: W1001 13:44:59.931582 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3023060_c8ae_492b_b1cb_a418d9a8e59f.slice/crio-2c699e08e99004ee277fdf5586f9e1e91ff3786dfdbe022ef21f4a2286ef2c0f WatchSource:0}: Error finding container 2c699e08e99004ee277fdf5586f9e1e91ff3786dfdbe022ef21f4a2286ef2c0f: Status 404 returned error can't find the container with id 2c699e08e99004ee277fdf5586f9e1e91ff3786dfdbe022ef21f4a2286ef2c0f Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.931666 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.932131 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.932544 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-wgx5p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.932915 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.933601 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.934231 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.934827 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.935356 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.935962 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.936607 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.938741 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.939402 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.939881 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.941047 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.941994 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.943218 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.943746 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.944743 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.945315 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.945680 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.946606 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.947272 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.947742 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.948936 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.949407 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.951103 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.951849 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.953191 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.953962 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.954482 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.955560 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" 
path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.956049 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.957052 4605 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.958649 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.959047 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.963333 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.964313 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.965469 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.967121 4605 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.967877 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.969071 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.969877 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.971346 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.972155 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.972921 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.973990 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.974784 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.975840 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.976480 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.977469 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.977989 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.979073 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.979691 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.980711 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.981363 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.981988 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.983243 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.983895 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.986457 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kzv4p"] Oct 01 13:44:59 crc 
kubenswrapper[4605]: I1001 13:44:59.987724 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992043 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992223 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992381 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992416 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992722 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992854 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.992903 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 01 13:44:59 crc kubenswrapper[4605]: I1001 13:44:59.997360 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:44:59Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.017513 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.029462 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.041899 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.057931 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.071974 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-bin\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072008 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-script-lib\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072030 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-netd\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072060 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-slash\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-slash\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072076 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovn-node-metrics-cert\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072114 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-netns\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072130 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-ovn-kubernetes\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072146 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-etc-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072162 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072192 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-kubelet\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072210 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-systemd\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072227 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-ovn\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc 
kubenswrapper[4605]: I1001 13:45:00.072241 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-env-overrides\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072256 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-node-log\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072272 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmn8q\" (UniqueName: \"kubernetes.io/projected/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-kube-api-access-kmn8q\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072289 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-systemd-units\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072304 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-var-lib-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072334 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072350 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-config\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.072364 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-log-socket\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.076458 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.095686 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.118932 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.138286 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerStarted","Data":"b1473c91de1a04d9a450818e3ca63e830c5c2689641bda7dfe4a0a898acc056b"} Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.139200 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerStarted","Data":"ac90dd0a20d813da4ac01a9ed7612f0b84148a90774274732cc661b0710521df"} Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.142152 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178"} Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.142202 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"2c699e08e99004ee277fdf5586f9e1e91ff3786dfdbe022ef21f4a2286ef2c0f"} Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.145194 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns/node-resolver-6zb6l" event={"ID":"83630902-b99b-4944-81a4-487e9584e0c0","Type":"ContainerStarted","Data":"b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538"} Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.145259 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-6zb6l" event={"ID":"83630902-b99b-4944-81a4-487e9584e0c0","Type":"ContainerStarted","Data":"4dfa7f59c0391ae780dbcbc2b3bb8a89b8fd63c7e383ce832eb000bfcfcfb418"} Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.150488 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.162053 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173024 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-slash\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173111 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovn-node-metrics-cert\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173142 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-netns\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173150 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-slash\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173171 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-ovn-kubernetes\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173221 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-ovn-kubernetes\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173241 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-etc-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173265 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173294 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-kubelet\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173310 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-systemd\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173326 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-ovn\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173342 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-env-overrides\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173359 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-node-log\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173380 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmn8q\" (UniqueName: \"kubernetes.io/projected/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-kube-api-access-kmn8q\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173420 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-systemd-units\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173435 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-var-lib-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173455 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173469 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-config\") pod \"ovnkube-node-kzv4p\" 
(UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173504 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-log-socket\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173532 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-bin\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173549 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-script-lib\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173568 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-netd\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173618 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-netd\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173640 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-etc-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173663 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173685 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-kubelet\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173705 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-systemd\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" 
Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173724 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-ovn\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173932 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-netns\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.173988 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174019 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-node-log\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174226 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-env-overrides\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174378 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-systemd-units\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174411 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-var-lib-openvswitch\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174436 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-log-socket\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174677 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-config\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.174710 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-bin\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.175195 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-script-lib\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.178673 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.179081 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovn-node-metrics-cert\") pod \"ovnkube-node-kzv4p\" (UID: 
\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.194645 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmn8q\" (UniqueName: \"kubernetes.io/projected/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-kube-api-access-kmn8q\") pod \"ovnkube-node-kzv4p\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.199622 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.223138 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.236934 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.253059 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.270117 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.287298 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.307722 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.331778 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.349945 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.353573 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:00 crc kubenswrapper[4605]: W1001 13:45:00.373682 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0b90c02_c41c_4f5b_ae0a_c6444435a3ae.slice/crio-7a49db42d9607f1121da89e018d193ad2e829f595e9c5a64ccf244533ed09162 WatchSource:0}: Error finding container 7a49db42d9607f1121da89e018d193ad2e829f595e9c5a64ccf244533ed09162: Status 404 returned error can't find the container with id 7a49db42d9607f1121da89e018d193ad2e829f595e9c5a64ccf244533ed09162 Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.374912 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.393565 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:00 crc kubenswrapper[4605]: I1001 13:45:00.425790 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.150165 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6" exitCode=0 Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.150266 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.150307 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"7a49db42d9607f1121da89e018d193ad2e829f595e9c5a64ccf244533ed09162"} Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.151912 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9"} Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.155462 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b"} Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.157143 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerStarted","Data":"1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd"} Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.158772 4605 generic.go:334] "Generic (PLEG): container finished" podID="b9fc4aae-03cb-458d-83cb-1a3ab9fa9639" containerID="14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8" exitCode=0 Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.158810 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerDied","Data":"14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8"} Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.171221 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.204073 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.220740 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.257981 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.272633 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.291294 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.304387 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.330978 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.359316 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.377703 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.390737 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.403466 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.413934 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.427391 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.444909 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.459868 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.479637 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.493460 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.506054 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.527688 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z 
is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.539459 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.557282 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-rel
ease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.574275 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.587339 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.601647 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.614341 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.694160 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.694320 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694347 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-10-01 13:45:05.694319279 +0000 UTC m=+28.438295497 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.694382 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.694423 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694430 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.694459 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694494 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:05.694478613 +0000 UTC m=+28.438454841 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694606 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694624 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694641 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694642 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694674 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694720 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.694737 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:05.69472301 +0000 UTC m=+28.438699218 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.695118 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:05.694954916 +0000 UTC m=+28.438931124 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.696205 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.696423 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:05.696369505 +0000 UTC m=+28.440345713 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.926570 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.926582 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.927027 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:01 crc kubenswrapper[4605]: I1001 13:45:01.926618 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.927145 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:01 crc kubenswrapper[4605]: E1001 13:45:01.927265 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.164588 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerStarted","Data":"0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.170757 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.170819 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.170831 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.170850 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.170860 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.170869 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.186593 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.208175 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.227167 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.245107 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.270130 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.282522 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.301553 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.317165 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.332585 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.346141 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.358354 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.378024 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:02 crc kubenswrapper[4605]: I1001 13:45:02.391920 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:02Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.177481 4605 generic.go:334] "Generic (PLEG): container finished" podID="b9fc4aae-03cb-458d-83cb-1a3ab9fa9639" containerID="0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0" exitCode=0 Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.177554 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerDied","Data":"0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0"} Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.192445 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1
74f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.207890 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.225636 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.241392 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.259204 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.276408 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\
"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.297395 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z 
is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.316133 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.329213 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.342040 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.359228 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.374793 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.392193 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.737130 4605 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.740458 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.740516 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.740529 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.740675 4605 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.747168 4605 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.747407 4605 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.748368 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.748408 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.748422 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.748441 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.748453 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.773572 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.778453 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.778511 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.778526 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.778548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.778564 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.792984 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.797050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.797127 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.797141 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.797161 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.797174 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.811221 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.815822 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.815868 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.815879 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.815905 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.815916 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.829675 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.835712 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.835866 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.835881 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.836005 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.836033 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.850654 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:03Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.850861 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.852911 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.852953 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.852970 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.852992 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.853008 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.925709 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.925763 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.925801 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.925899 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.926041 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:03 crc kubenswrapper[4605]: E1001 13:45:03.926141 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.955897 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.955950 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.955962 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.955982 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:03 crc kubenswrapper[4605]: I1001 13:45:03.955997 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:03Z","lastTransitionTime":"2025-10-01T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.059673 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.059752 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.059770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.059804 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.059824 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.162905 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.162969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.162989 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.163021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.163041 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.192448 4605 generic.go:334] "Generic (PLEG): container finished" podID="b9fc4aae-03cb-458d-83cb-1a3ab9fa9639" containerID="4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244" exitCode=0 Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.192512 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerDied","Data":"4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.224395 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z 
is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.246413 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.267529 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.267602 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.267629 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.267656 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.267675 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.279572 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.308990 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"
/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.329199 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.348245 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.362154 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.369938 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.369996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.370012 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.370037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.370053 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.378163 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.394346 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.412871 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.442594 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.461203 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.473210 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.473254 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.473273 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.473318 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.473338 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.477436 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:04Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.576788 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.576840 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.576854 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.576876 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.576890 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.679309 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.679361 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.679373 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.679395 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.679407 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.782879 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.782936 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.782946 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.782965 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.782985 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.886814 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.886914 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.886942 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.886978 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.886997 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.990818 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.990881 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.990901 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.990929 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:04 crc kubenswrapper[4605]: I1001 13:45:04.990953 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:04Z","lastTransitionTime":"2025-10-01T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.094430 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.094498 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.094516 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.094543 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.094567 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.202270 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.202351 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.202370 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.202399 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.202419 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.208483 4605 generic.go:334] "Generic (PLEG): container finished" podID="b9fc4aae-03cb-458d-83cb-1a3ab9fa9639" containerID="b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7" exitCode=0 Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.208632 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerDied","Data":"b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.222724 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.266313 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.292175 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.313497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.313542 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.313555 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.313573 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.313583 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.345414 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.369652 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.385959 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.400445 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.415153 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.415963 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.416002 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.416014 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.416034 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.416046 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.430986 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.447181 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.463145 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.477156 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.494825 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.506840 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.520859 4605 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.520975 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.521001 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.521038 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.521062 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.623946 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.623988 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.623997 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.624014 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.624024 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.727256 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.727306 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.727319 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.727337 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.727349 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.736173 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.736273 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.736299 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.736326 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.736350 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736488 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:45:13.736446161 +0000 UTC m=+36.480422409 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736501 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736554 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736574 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736587 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736594 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736619 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736642 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:13.736621656 +0000 UTC m=+36.480598044 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736686 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:13.736669747 +0000 UTC m=+36.480645995 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736785 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.736829 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:13.736815311 +0000 UTC m=+36.480791549 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.737054 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.737290 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:13.737261013 +0000 UTC m=+36.481237401 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.830597 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.830647 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.830662 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.830684 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.830697 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.925826 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.925868 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.925938 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.926055 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.926180 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:05 crc kubenswrapper[4605]: E1001 13:45:05.926423 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.933346 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.933406 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.933424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.933449 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:05 crc kubenswrapper[4605]: I1001 13:45:05.933472 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:05Z","lastTransitionTime":"2025-10-01T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.036503 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.036558 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.036570 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.036593 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.036606 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.140269 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.140315 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.140330 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.140349 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.140363 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.233236 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerStarted","Data":"24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.242820 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.242882 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.242902 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.242933 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.242952 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.260588 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.284064 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.303296 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.318609 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.335408 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.346276 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.346606 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.346713 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.346822 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.346909 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.352343 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.368871 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.382631 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.397068 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.410827 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.429380 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.441554 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"na
me\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.450059 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.450119 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.450133 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.450155 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.450170 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.462428 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95
c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:06Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.552960 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.552998 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.553007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.553021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.553032 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.656489 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.657168 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.657405 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.657608 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.657763 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.761521 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.762012 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.762041 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.762080 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.762152 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.866540 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.866607 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.866626 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.866654 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.866685 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.969227 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.969278 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.969290 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.969334 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:06 crc kubenswrapper[4605]: I1001 13:45:06.969348 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:06Z","lastTransitionTime":"2025-10-01T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.072328 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.072388 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.072403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.072424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.072437 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.178926 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.178987 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.178999 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.179020 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.179033 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.242147 4605 generic.go:334] "Generic (PLEG): container finished" podID="b9fc4aae-03cb-458d-83cb-1a3ab9fa9639" containerID="24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c" exitCode=0 Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.242261 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerDied","Data":"24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.250493 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.250916 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.251194 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.251236 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.259055 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-
4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.281341 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.281376 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.281386 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.281402 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.281413 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.283707 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcm
s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.294868 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.301524 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.301743 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.320491 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z 
is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.334064 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.347007 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.362792 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.379620 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.385374 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.385416 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.385430 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.385451 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.385470 4605 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.404191 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.422920 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.438589 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.453760 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.468082 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.484719 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.489604 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.489639 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.489652 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.489675 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.489693 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.503200 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.519621 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.544342 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d
882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.558342 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.575629 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c
479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.590410 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.592170 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.592219 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.592239 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.592261 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.592279 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.606198 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.620230 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.631863 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.641905 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-jvqzn"] Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.642513 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.647033 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.647077 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.647841 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.648460 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.649323 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.670220 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.682769 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.695036 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.695111 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.695123 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.695140 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.695154 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.697919 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.711081 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.727561 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.742845 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.754355 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-985lh\" (UniqueName: \"kubernetes.io/projected/a4d5a988-e2c8-47db-b738-cb43467b1bfb-kube-api-access-985lh\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.754438 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4d5a988-e2c8-47db-b738-cb43467b1bfb-serviceca\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.754555 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4d5a988-e2c8-47db-b738-cb43467b1bfb-host\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.759876 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers 
with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.774577 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.796512 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442
849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.797422 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 
01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.797494 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.797511 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.797538 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.797557 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.814856 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.828119 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.841201 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.852467 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.854975 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-985lh\" (UniqueName: \"kubernetes.io/projected/a4d5a988-e2c8-47db-b738-cb43467b1bfb-kube-api-access-985lh\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.855014 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4d5a988-e2c8-47db-b738-cb43467b1bfb-serviceca\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.855067 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4d5a988-e2c8-47db-b738-cb43467b1bfb-host\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.855162 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4d5a988-e2c8-47db-b738-cb43467b1bfb-host\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.856318 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4d5a988-e2c8-47db-b738-cb43467b1bfb-serviceca\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc 
kubenswrapper[4605]: I1001 13:45:07.865653 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.879199 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-985lh\" (UniqueName: \"kubernetes.io/projected/a4d5a988-e2c8-47db-b738-cb43467b1bfb-kube-api-access-985lh\") pod \"node-ca-jvqzn\" (UID: \"a4d5a988-e2c8-47db-b738-cb43467b1bfb\") " pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.883304 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.898077 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.900927 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.900969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.900982 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.900999 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.901010 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:07Z","lastTransitionTime":"2025-10-01T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.926332 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.926446 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:07 crc kubenswrapper[4605]: E1001 13:45:07.926485 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.926537 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:07 crc kubenswrapper[4605]: E1001 13:45:07.926635 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:07 crc kubenswrapper[4605]: E1001 13:45:07.926814 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.943895 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.954867 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-jvqzn" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.954906 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: W1001 13:45:07.966927 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4d5a988_e2c8_47db_b738_cb43467b1bfb.slice/crio-00ca8f8c3e4e8bbf99516077e313e145fbbd9b4a7b830ef738991c19641caf13 WatchSource:0}: Error finding container 00ca8f8c3e4e8bbf99516077e313e145fbbd9b4a7b830ef738991c19641caf13: Status 404 returned error can't find the container with id 
00ca8f8c3e4e8bbf99516077e313e145fbbd9b4a7b830ef738991c19641caf13 Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.978083 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:07 crc kubenswrapper[4605]: I1001 13:45:07.998481 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:07Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.003114 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.003163 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.003180 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.003202 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.003217 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.015712 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.033453 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.054726 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.071954 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.091932 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.106911 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.106954 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.106967 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.106986 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.106998 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.107475 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.123015 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.144643 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d
882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.160618 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.177949 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c
479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.209283 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.209361 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.209372 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: 
I1001 13:45:08.209389 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.209401 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.262001 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jvqzn" event={"ID":"a4d5a988-e2c8-47db-b738-cb43467b1bfb","Type":"ContainerStarted","Data":"17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.262060 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-jvqzn" event={"ID":"a4d5a988-e2c8-47db-b738-cb43467b1bfb","Type":"ContainerStarted","Data":"00ca8f8c3e4e8bbf99516077e313e145fbbd9b4a7b830ef738991c19641caf13"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.267730 4605 generic.go:334] "Generic (PLEG): container finished" podID="b9fc4aae-03cb-458d-83cb-1a3ab9fa9639" containerID="bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f" exitCode=0 Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.268159 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerDied","Data":"bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.278468 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.297748 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c
479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.311701 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.311741 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.311752 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: 
I1001 13:45:08.311770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.311783 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.316399 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daem
on-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.339331 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d
882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.359754 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.377717 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.390866 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.403074 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.414200 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.414232 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.414241 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.414257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.414266 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.415539 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.434613 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.453498 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.467120 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.479751 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.494003 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.506665 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.517579 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.517615 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.517629 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.517648 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.517661 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.519821 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.532357 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.545490 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.570077 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.586310 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.607426 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442
849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.621694 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 
01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.621761 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.621774 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.621798 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.621813 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.625209 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.641142 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.655327 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.672264 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.697646 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.716079 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.724731 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.724973 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.725133 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.725406 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.725501 4605 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.741295 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:08Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.828987 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.829056 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.829075 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.829129 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.829153 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.932065 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.932128 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.932141 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.932155 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:08 crc kubenswrapper[4605]: I1001 13:45:08.932167 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:08Z","lastTransitionTime":"2025-10-01T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.035594 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.035638 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.035650 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.035668 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.035680 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.138902 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.138944 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.138954 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.138969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.138981 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.241275 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.241311 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.241321 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.241334 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.241344 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.278172 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" event={"ID":"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639","Type":"ContainerStarted","Data":"2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.294680 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.309377 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.328650 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.344419 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.344470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.344482 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.344500 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.344512 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.357708 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.380431 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.399684 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.422196 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.436977 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.448367 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.448407 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.448420 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc 
kubenswrapper[4605]: I1001 13:45:09.448900 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.448920 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.458667 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.476085 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.489931 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.502807 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.519489 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.534546 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:09Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.551600 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.551653 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.551665 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.551686 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.551703 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.654490 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.654548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.654564 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.654588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.654605 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.757397 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.757434 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.757445 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.757462 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.757472 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.861013 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.861050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.861074 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.861112 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.861127 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.926111 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:09 crc kubenswrapper[4605]: E1001 13:45:09.926302 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.926796 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:09 crc kubenswrapper[4605]: E1001 13:45:09.927071 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.927301 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:09 crc kubenswrapper[4605]: E1001 13:45:09.927395 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.964074 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.964147 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.964160 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.964196 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:09 crc kubenswrapper[4605]: I1001 13:45:09.964217 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:09Z","lastTransitionTime":"2025-10-01T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.067874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.067953 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.067967 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.067987 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.068020 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.171531 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.171585 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.171596 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.171615 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.171627 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.274981 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.275039 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.275050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.275073 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.275087 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.283233 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/0.log" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.287041 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24" exitCode=1 Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.287128 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.288207 4605 scope.go:117] "RemoveContainer" containerID="8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.314927 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.339148 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.354352 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.375029 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.379176 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.379220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.379232 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.379250 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.379266 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.391755 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.407263 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.422230 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.441673 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.479028 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host
-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.481397 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.481447 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.481460 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.481478 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.481488 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.506310 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:10Z\\\",\\\"message\\\":\\\"1001 13:45:10.084457 5745 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 13:45:10.084473 5745 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 13:45:10.084507 5745 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 13:45:10.084522 5745 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 13:45:10.084556 5745 factory.go:656] Stopping watch factory\\\\nI1001 13:45:10.084591 5745 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 13:45:10.084615 5745 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1001 13:45:10.084419 5745 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 13:45:10.085001 5745 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1001 13:45:10.085025 5745 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 13:45:10.085040 5745 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 13:45:10.085053 5745 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 13:45:10.085163 5745 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c9
5c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.533254 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.555009 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.602456 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.603761 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.603808 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.603821 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.603841 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.603853 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.614471 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:10Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.706085 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.706146 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.706159 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.706176 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 
13:45:10.706187 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.808744 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.808785 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.808798 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.808816 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.808831 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.913770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.913815 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.913827 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.913845 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:10 crc kubenswrapper[4605]: I1001 13:45:10.913856 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:10Z","lastTransitionTime":"2025-10-01T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.017131 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.017172 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.017184 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.017202 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.017214 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.126801 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.126913 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.126942 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.126978 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.127002 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.230478 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.230544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.230563 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.230591 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.230610 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.295813 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/0.log" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.301948 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.302791 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.326616 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.333394 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.333477 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.333536 4605 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.333572 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.333608 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.348337 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.369199 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.385747 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.405557 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.425393 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.437553 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.437596 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.437610 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.437650 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.437663 4605 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.443122 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.460981 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.477656 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.493269 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.510665 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.529712 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067461
6e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.540023 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.540121 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.540135 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.540154 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.540165 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.548458 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.573694 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:10Z\\\",\\\"message\\\":\\\"1001 13:45:10.084457 5745 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 13:45:10.084473 5745 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 13:45:10.084507 5745 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 13:45:10.084522 5745 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 13:45:10.084556 5745 factory.go:656] Stopping watch factory\\\\nI1001 13:45:10.084591 5745 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 13:45:10.084615 5745 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1001 13:45:10.084419 5745 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 13:45:10.085001 5745 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1001 13:45:10.085025 5745 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 13:45:10.085040 5745 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 13:45:10.085053 5745 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 13:45:10.085163 5745 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:11Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.644152 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.644269 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.644290 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.644352 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.644378 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.747801 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.747864 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.747876 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.747895 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.747909 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.851431 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.851524 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.851538 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.851558 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.851571 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.925686 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.925741 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:11 crc kubenswrapper[4605]: E1001 13:45:11.925832 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:11 crc kubenswrapper[4605]: E1001 13:45:11.925925 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.926338 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:11 crc kubenswrapper[4605]: E1001 13:45:11.926402 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.953930 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.953998 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.954021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.954049 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:11 crc kubenswrapper[4605]: I1001 13:45:11.954072 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:11Z","lastTransitionTime":"2025-10-01T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.056998 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.057045 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.057057 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.057075 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.057114 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.160502 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.160794 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.160920 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.161017 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.161140 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.263817 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.263871 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.263887 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.263912 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.263931 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.308605 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/1.log" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.309802 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/0.log" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.313400 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a" exitCode=1 Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.313453 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.313578 4605 scope.go:117] "RemoveContainer" containerID="8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.314658 4605 scope.go:117] "RemoveContainer" containerID="516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a" Oct 01 13:45:12 crc kubenswrapper[4605]: E1001 13:45:12.314978 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.331864 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.346984 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.364943 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.369322 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.369472 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.369546 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.369643 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.369731 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.392081 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.409696 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 
2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.428238 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.450958 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/ru
n/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:10Z\\\",\\\"message\\\":\\\"1001 13:45:10.084457 5745 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 13:45:10.084473 5745 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 13:45:10.084507 5745 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 13:45:10.084522 5745 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 13:45:10.084556 5745 factory.go:656] Stopping watch factory\\\\nI1001 13:45:10.084591 5745 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 13:45:10.084615 5745 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1001 13:45:10.084419 5745 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 13:45:10.085001 5745 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1001 13:45:10.085025 5745 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 13:45:10.085040 5745 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 13:45:10.085053 5745 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 13:45:10.085163 5745 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed 
to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.472623 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.472993 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.473159 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.473236 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.473318 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.474188 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.489618 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.506145 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.511519 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2"] Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.512314 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.514481 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.518433 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.531979 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.554801 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.574187 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.575901 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.575929 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.575939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.575959 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.575974 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.589916 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.608085 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.626631 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9a6400c9-7945-44a6-b37d-e94811fc9754-env-overrides\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.626681 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2jrz\" (UniqueName: \"kubernetes.io/projected/9a6400c9-7945-44a6-b37d-e94811fc9754-kube-api-access-n2jrz\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.626721 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9a6400c9-7945-44a6-b37d-e94811fc9754-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.626780 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9a6400c9-7945-44a6-b37d-e94811fc9754-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.627942 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.641228 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.660316 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442
849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:10Z\\\",\\\"message\\\":\\\"1001 13:45:10.084457 5745 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 13:45:10.084473 5745 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 13:45:10.084507 5745 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 13:45:10.084522 5745 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 13:45:10.084556 5745 factory.go:656] Stopping watch factory\\\\nI1001 13:45:10.084591 5745 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 13:45:10.084615 5745 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1001 13:45:10.084419 5745 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 13:45:10.085001 5745 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1001 13:45:10.085025 5745 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 13:45:10.085040 5745 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 13:45:10.085053 5745 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 13:45:10.085163 5745 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed 
to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.674611 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.678359 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.678425 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.678439 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.678465 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.678479 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.687726 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.702622 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.715856 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.727784 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9a6400c9-7945-44a6-b37d-e94811fc9754-env-overrides\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.727827 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9a6400c9-7945-44a6-b37d-e94811fc9754-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.727845 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2jrz\" (UniqueName: 
\"kubernetes.io/projected/9a6400c9-7945-44a6-b37d-e94811fc9754-kube-api-access-n2jrz\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.727867 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9a6400c9-7945-44a6-b37d-e94811fc9754-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.728538 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9a6400c9-7945-44a6-b37d-e94811fc9754-env-overrides\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.728633 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9a6400c9-7945-44a6-b37d-e94811fc9754-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.733261 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.735724 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9a6400c9-7945-44a6-b37d-e94811fc9754-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.746634 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2jrz\" (UniqueName: \"kubernetes.io/projected/9a6400c9-7945-44a6-b37d-e94811fc9754-kube-api-access-n2jrz\") pod \"ovnkube-control-plane-749d76644c-gqbr2\" (UID: \"9a6400c9-7945-44a6-b37d-e94811fc9754\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.748708 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.763738 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.777684 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.782622 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.782678 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.782697 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.782722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.782740 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.794218 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.808280 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.827040 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:12Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.833396 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.886259 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.886302 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.886313 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.886335 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.886348 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.988995 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.989111 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.989125 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.989142 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:12 crc kubenswrapper[4605]: I1001 13:45:12.989157 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:12Z","lastTransitionTime":"2025-10-01T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.091924 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.091967 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.091980 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.091997 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.092014 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.194762 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.194829 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.194849 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.194874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.194891 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.297375 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.297425 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.297438 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.297458 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.297472 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.320965 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" event={"ID":"9a6400c9-7945-44a6-b37d-e94811fc9754","Type":"ContainerStarted","Data":"51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.321025 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" event={"ID":"9a6400c9-7945-44a6-b37d-e94811fc9754","Type":"ContainerStarted","Data":"2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.321040 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" event={"ID":"9a6400c9-7945-44a6-b37d-e94811fc9754","Type":"ContainerStarted","Data":"5e7f9b68ff50befadc713d83a9a1d57bee28ac28364377de23d168999cbdd339"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.324022 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/1.log" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.327774 4605 scope.go:117] "RemoveContainer" containerID="516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.328008 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.336688 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.351128 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.369634 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.389439 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.400221 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.400276 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.400289 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.400310 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.400342 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.408397 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.424438 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.439944 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.453128 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.466935 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.479975 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.491538 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.503163 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.503205 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.503214 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.503231 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.503245 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.511623 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59
053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8caf90cf5a2d382fcaf4732b5526ef00b1085e9d882b21e599a513188db5cc24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:10Z\\\",\\\"message\\\":\\\"1001 13:45:10.084457 5745 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 13:45:10.084473 5745 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 13:45:10.084507 5745 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 13:45:10.084522 5745 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 13:45:10.084556 5745 factory.go:656] Stopping watch factory\\\\nI1001 13:45:10.084591 5745 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 13:45:10.084615 5745 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1001 13:45:10.084419 5745 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 13:45:10.085001 5745 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1001 13:45:10.085025 5745 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 13:45:10.085040 5745 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1001 13:45:10.085053 5745 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 13:45:10.085163 5745 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network 
controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fca
a538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.523914 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.535541 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.549583 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"
cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.562446 
4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.573273 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.583662 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.594961 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.606169 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.606230 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.606242 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.606258 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.606274 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.610049 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.613165 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-m7ph7"] Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.614203 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.614340 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.632962 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.649736 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.664520 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.679471 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.696993 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.708685 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.708737 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.708751 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.708771 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.708785 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.708806 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.721518 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.736150 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.737568 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.737739 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.737787 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.737828 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.737921 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:45:29.737882738 +0000 UTC m=+52.481858936 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.737968 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.737986 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.738024 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738070 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:29.738046833 +0000 UTC m=+52.482023211 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738410 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:29.738397652 +0000 UTC m=+52.482373860 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738197 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738485 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738503 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738540 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:29.738532516 +0000 UTC m=+52.482508724 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.738565 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5krq\" (UniqueName: \"kubernetes.io/projected/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-kube-api-access-k5krq\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.738621 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738812 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738854 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.738875 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.739188 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:45:29.739160953 +0000 UTC m=+52.483137201 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.750789 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath
\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.771932 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59
053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.793326 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59
053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.806846 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.811142 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.811194 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.811206 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.811231 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.811247 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.817616 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.834524 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 
2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.839876 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5krq\" (UniqueName: \"kubernetes.io/projected/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-kube-api-access-k5krq\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.839950 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.840127 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.840309 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:45:14.340288698 +0000 UTC m=+37.084264906 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.850709 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.855329 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5krq\" (UniqueName: \"kubernetes.io/projected/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-kube-api-access-k5krq\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.864261 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.879390 4605 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.895228 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.906805 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.914234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.914283 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.914294 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.914314 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.914325 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:13Z","lastTransitionTime":"2025-10-01T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.918739 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.925711 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.925825 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.925949 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.925979 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.926186 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:13 crc kubenswrapper[4605]: E1001 13:45:13.926294 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.938262 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.956077 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.968078 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.984286 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:13 crc kubenswrapper[4605]: I1001 13:45:13.998461 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:13Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.012546 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:14Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.016540 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.016603 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.016623 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.016701 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.016731 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.039949 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.039976 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.039984 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.039996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.040006 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.059827 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:14Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.065983 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.066015 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.066024 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.066043 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.066057 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.080998 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:14Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.085141 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.085262 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.085323 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.085424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.085548 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.099815 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:14Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.103349 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.103384 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.103393 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.103409 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.103421 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.116445 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:14Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.125722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.125803 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.125813 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.125831 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.125842 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.141135 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:14Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.141257 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.143422 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.143467 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.143481 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.143502 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.143519 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.245951 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.246008 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.246021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.246041 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.246054 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.344843 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.345133 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:14 crc kubenswrapper[4605]: E1001 13:45:14.345265 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:45:15.345243226 +0000 UTC m=+38.089219434 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.349481 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.349557 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.349580 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.349614 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.349637 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.453612 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.453672 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.453682 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.453699 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.453711 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.557171 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.557553 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.557649 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.557771 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.557858 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.661165 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.661210 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.661219 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.661236 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.661247 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.765905 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.766288 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.766300 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.766317 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.766329 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.869571 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.869612 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.869626 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.869645 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.869658 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.972468 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.972649 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.972709 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.972768 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:14 crc kubenswrapper[4605]: I1001 13:45:14.972843 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:14Z","lastTransitionTime":"2025-10-01T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.015236 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.030053 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.045856 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.060302 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.075740 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.075794 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.075811 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.075833 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.075849 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.077630 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.092297 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.106926 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.118764 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.134493 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.148236 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.164982 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"
cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.178476 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.178896 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.179052 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.179167 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.179297 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.182525 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"m
ountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.203263 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59
053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.218995 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.233245 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.245549 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.256771 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:15Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.282312 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.282356 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.282365 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.282384 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.282396 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.355694 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:15 crc kubenswrapper[4605]: E1001 13:45:15.355898 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:15 crc kubenswrapper[4605]: E1001 13:45:15.355979 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:45:17.355962354 +0000 UTC m=+40.099938562 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.385210 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.385247 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.385256 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.385271 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.385281 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.488763 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.488826 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.488839 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.488858 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.488869 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.591817 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.591902 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.591914 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.591935 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.591951 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.694901 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.695010 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.695025 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.695047 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.695063 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.798547 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.798593 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.798605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.798624 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.798636 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.902056 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.902126 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.902137 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.902157 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.902171 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:15Z","lastTransitionTime":"2025-10-01T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.926681 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.926793 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.926855 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:15 crc kubenswrapper[4605]: E1001 13:45:15.926906 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:15 crc kubenswrapper[4605]: I1001 13:45:15.927022 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:15 crc kubenswrapper[4605]: E1001 13:45:15.927142 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:15 crc kubenswrapper[4605]: E1001 13:45:15.927055 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:15 crc kubenswrapper[4605]: E1001 13:45:15.927327 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.005593 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.005663 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.005677 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.005716 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.005730 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.108938 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.109021 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.109041 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.109063 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.109077 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.211987 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.212052 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.212064 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.212115 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.212131 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.315217 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.315274 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.315285 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.315301 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.315311 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.417782 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.417824 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.417835 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.417850 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.417860 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.520266 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.520310 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.520321 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.520340 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.520353 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.623653 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.623713 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.623727 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.623748 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.623759 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.727011 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.727059 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.727074 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.727128 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.727142 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.829286 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.829325 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.829334 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.829351 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.829362 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.932345 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.932427 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.932436 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.932453 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:16 crc kubenswrapper[4605]: I1001 13:45:16.932464 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:16Z","lastTransitionTime":"2025-10-01T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.035317 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.035367 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.035379 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.035397 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.035412 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.138127 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.138188 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.138202 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.138223 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.138241 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.241502 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.241552 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.241563 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.241581 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.241592 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.344616 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.344661 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.344674 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.344693 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.344706 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.378286 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:45:17 crc kubenswrapper[4605]: E1001 13:45:17.378541 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
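The secret.go error above does not necessarily mean the Secret is absent from the API server: "not registered" is the kubelet's secret manager reporting that the object is not yet in its local cache for this pod, which typically clears once the pod is fully synced. A minimal client-go sketch for the API-side half of the diagnosis, checking whether the Secret object itself exists (the kubeconfig path is illustrative, not taken from this log):

package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative kubeconfig path; adjust for the cluster at hand.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// Does the Secret the volume plugin wants actually exist in the API?
	s, err := cs.CoreV1().Secrets("openshift-multus").Get(context.TODO(), "metrics-daemon-secret", metav1.GetOptions{})
	if err != nil {
		log.Fatalf("secret lookup failed: %v", err)
	}
	fmt.Printf("found %s/%s with %d keys\n", s.Namespace, s.Name, len(s.Data))
}

If the lookup succeeds while the kubelet still logs "not registered", the problem is on the kubelet's side of the cache, not in the API object.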
Oct 01 13:45:17 crc kubenswrapper[4605]: E1001 13:45:17.378894 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:45:21.37887127 +0000 UTC m=+44.122847478 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.447352 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.447400 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.447411 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.447432 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.447443 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.550338 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.550386 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.550398 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.550426 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.550439 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
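The nestedpendingoperations entry above shows the kubelet refusing to retry the failed mount for 4 seconds. Failed volume operations back off exponentially, with the wait roughly doubling after each failure up to a cap. A minimal stand-alone sketch of such a schedule; the 500ms start, factor of 2, and ~2m cap are assumed kubelet defaults, not values read from this log (the 4s above would be the fourth step of such a schedule: 0.5s, 1s, 2s, 4s):

package main

import (
	"fmt"
	"time"
)

// Sketch of an exponential backoff schedule like the one behind
// "durationBeforeRetry 4s". Initial delay, factor, and cap are
// assumptions, not values taken from this log.
func main() {
	delay := 500 * time.Millisecond           // assumed initial delay
	maxDelay := 2*time.Minute + 2*time.Second // assumed cap

	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("after failure %d: next retry in %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}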
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.652742 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.652782 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.652792 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.652810 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.652823 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.760542 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.760643 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.760776 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.761784 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.761832 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.864392 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.864441 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.864452 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.864470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.864487 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
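Every NodeNotReady heartbeat above reduces to a single filesystem condition: nothing usable is in /etc/kubernetes/cni/net.d/ yet (on this cluster, ovn-kubernetes writes the config once it comes up). A rough stand-alone approximation of that readiness check; the extension list mirrors the conventional CNI config file types and is an assumption, not the kubelet's exact code path:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Roughly approximates the condition behind the repeated
// "no CNI configuration file" messages: the runtime reports the
// network ready only once a CNI config appears in the conf directory.
func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: %v\n", err)
		return
	}
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // conventional CNI config extensions
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Printf("NetworkReady=true: found %v\n", confs)
}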
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.926150 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.926217 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.926217 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:45:17 crc kubenswrapper[4605]: E1001 13:45:17.926300 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:45:17 crc kubenswrapper[4605]: E1001 13:45:17.926425 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.926452 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:45:17 crc kubenswrapper[4605]: E1001 13:45:17.926503 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 13:45:17 crc kubenswrapper[4605]: E1001 13:45:17.926556 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.944003 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:17Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.957526 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:17Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.967074 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.967158 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.967177 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.967199 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.967211 4605 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:17Z","lastTransitionTime":"2025-10-01T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.970663 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:17Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:17 crc kubenswrapper[4605]: I1001 13:45:17.987234 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:17Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.013297 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.027078 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.038019 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.049829 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.065888 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.069821 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.069864 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.069874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.069891 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.069901 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.082014 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\
\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536b
ba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.095849 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.122932 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59
053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.140480 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.156167 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.168931 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.173016 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.173236 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.173297 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.173357 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.173414 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.183513 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:18Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.277139 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.277463 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.277588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.277714 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 
13:45:18.277775 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.380971 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.381025 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.381037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.381055 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.381066 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.484158 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.484222 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.484237 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.484258 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.484276 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.587583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.587920 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.587979 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.588048 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.588129 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.691482 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.691521 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.691532 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.691547 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.691556 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.794032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.794066 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.794075 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.794111 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.794121 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.896486 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.896550 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.896589 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.896625 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.896652 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.999528 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.999603 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.999625 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.999653 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:18 crc kubenswrapper[4605]: I1001 13:45:18.999685 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:18Z","lastTransitionTime":"2025-10-01T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.102326 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.102377 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.102390 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.102409 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.102425 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.205624 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.205683 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.205696 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.205717 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.205730 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.309688 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.309767 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.309789 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.309887 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.309916 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.412727 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.412781 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.412794 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.412814 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.412829 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.515958 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.516011 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.516025 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.516043 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.516055 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.619184 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.619242 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.619255 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.619276 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.619291 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.722023 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.722072 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.722082 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.722124 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.722326 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.825239 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.825283 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.825298 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.825320 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.825334 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.926375 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:19 crc kubenswrapper[4605]: E1001 13:45:19.926536 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.926957 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.927028 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:19 crc kubenswrapper[4605]: E1001 13:45:19.927168 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:19 crc kubenswrapper[4605]: E1001 13:45:19.927396 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.928927 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.928959 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.928978 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.928994 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.929006 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:19Z","lastTransitionTime":"2025-10-01T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:19 crc kubenswrapper[4605]: I1001 13:45:19.929022 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:19 crc kubenswrapper[4605]: E1001 13:45:19.929352 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.031369 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.031756 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.031839 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.031934 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.032002 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.135423 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.135468 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.135479 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.135504 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.135517 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.238478 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.238544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.238578 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.238613 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.238689 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.342121 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.342172 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.342190 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.342214 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.342232 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.446413 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.446571 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.446595 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.446622 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.446639 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.552602 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.553566 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.553644 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.553701 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.553731 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.657804 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.657866 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.657880 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.657900 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.657923 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.761187 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.761225 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.761237 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.761257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.761268 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.863587 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.863653 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.863671 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.863699 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.863724 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.967356 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.967433 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.967446 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.967469 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:20 crc kubenswrapper[4605]: I1001 13:45:20.967484 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:20Z","lastTransitionTime":"2025-10-01T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.070637 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.070684 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.070693 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.070708 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.070720 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.173578 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.173649 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.173669 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.173691 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.173707 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.277156 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.277220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.277240 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.277268 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.277287 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.380869 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.380936 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.380956 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.380983 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.381006 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.424898 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:21 crc kubenswrapper[4605]: E1001 13:45:21.425144 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:21 crc kubenswrapper[4605]: E1001 13:45:21.425291 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:45:29.425250029 +0000 UTC m=+52.169226267 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.484830 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.484887 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.484900 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.484923 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.484938 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.587564 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.587645 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.587655 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.587672 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.587685 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.691717 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.691789 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.691807 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.691835 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.691855 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.796179 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.796235 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.796253 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.796280 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.796300 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.900870 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.900923 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.900936 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.900954 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.900966 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:21Z","lastTransitionTime":"2025-10-01T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.926209 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.926343 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.926387 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:21 crc kubenswrapper[4605]: E1001 13:45:21.926415 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:21 crc kubenswrapper[4605]: I1001 13:45:21.926350 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:21 crc kubenswrapper[4605]: E1001 13:45:21.926519 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:21 crc kubenswrapper[4605]: E1001 13:45:21.926673 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:21 crc kubenswrapper[4605]: E1001 13:45:21.926762 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.003982 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.004032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.004046 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.004064 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.004083 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.108049 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.108119 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.108133 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.108154 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.108169 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.211688 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.211726 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.211736 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.211752 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.211767 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.315475 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.315533 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.315548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.315566 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.315577 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.419301 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.419365 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.419382 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.419410 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.419430 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.522428 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.522483 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.522497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.522516 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.522528 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.625254 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.625294 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.625303 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.625319 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.625332 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.728310 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.728368 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.728377 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.728394 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.728403 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.831619 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.831998 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.832187 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.832293 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.832388 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.935143 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.935217 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.935237 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.935268 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:22 crc kubenswrapper[4605]: I1001 13:45:22.935289 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:22Z","lastTransitionTime":"2025-10-01T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.038652 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.038735 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.038778 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.038813 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.038838 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.141888 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.141949 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.141966 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.141993 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.142014 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.246494 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.246569 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.246589 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.246617 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.246639 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.349346 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.349402 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.349414 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.349433 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.349447 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.452390 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.452448 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.452461 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.452481 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.452491 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.556029 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.556078 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.556089 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.556129 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.556145 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.659301 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.660413 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.660445 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.660475 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.660499 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.764069 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.764142 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.764153 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.764173 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.764186 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.867017 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.867064 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.867080 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.867128 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.867150 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.926830 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.926885 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.926830 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:23 crc kubenswrapper[4605]: E1001 13:45:23.927143 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.927169 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:23 crc kubenswrapper[4605]: E1001 13:45:23.927503 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:23 crc kubenswrapper[4605]: E1001 13:45:23.927660 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:23 crc kubenswrapper[4605]: E1001 13:45:23.927358 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.970521 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.970595 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.970621 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.970654 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:23 crc kubenswrapper[4605]: I1001 13:45:23.970682 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:23Z","lastTransitionTime":"2025-10-01T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.073583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.073631 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.073647 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.073668 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.073685 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.176935 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.176982 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.176994 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.177011 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.177024 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.280934 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.281022 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.281053 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.281132 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.281164 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.384778 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.384821 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.384833 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.384855 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.384869 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.467071 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.467146 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.467168 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.467232 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.467259 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: E1001 13:45:24.485230 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:24Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.489766 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.489819 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.489829 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.489845 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.489855 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: E1001 13:45:24.502396 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:24Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.506548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.506587 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.506603 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.506623 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.506634 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: E1001 13:45:24.517855 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:24Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.522724 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.522757 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.522770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.522787 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.522800 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: E1001 13:45:24.538082 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:24Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.542512 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.542548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.542560 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.542579 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.542594 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: E1001 13:45:24.557464 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:24Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:24 crc kubenswrapper[4605]: E1001 13:45:24.557652 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.559952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.559983 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.559991 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.560007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.560017 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.663220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.663281 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.663297 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.663317 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.663330 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.767470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.767591 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.767609 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.767641 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.767656 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.870505 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.870824 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.870918 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.871031 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.871046 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.974964 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.975032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.975049 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.975074 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:24 crc kubenswrapper[4605]: I1001 13:45:24.975108 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:24Z","lastTransitionTime":"2025-10-01T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.078062 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.078132 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.078143 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.078159 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.078170 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.181323 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.181466 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.181481 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.181506 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.181526 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.284441 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.284497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.284506 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.284522 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.284531 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.387909 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.387952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.387969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.387986 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.387997 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.489960 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.490007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.490026 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.490043 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.490055 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.592762 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.592804 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.592813 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.592830 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.592843 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.696315 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.696378 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.696390 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.696407 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.696419 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.798937 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.798998 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.799015 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.799040 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.799057 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.902334 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.902375 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.902384 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.902399 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.902410 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:25Z","lastTransitionTime":"2025-10-01T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.926392 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.926406 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.926548 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:25 crc kubenswrapper[4605]: I1001 13:45:25.926632 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:25 crc kubenswrapper[4605]: E1001 13:45:25.926674 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:25 crc kubenswrapper[4605]: E1001 13:45:25.926831 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:25 crc kubenswrapper[4605]: E1001 13:45:25.926994 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:25 crc kubenswrapper[4605]: E1001 13:45:25.927056 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.005681 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.005756 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.005772 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.005795 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.005813 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.108414 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.108452 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.108462 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.108478 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.108490 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.210960 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.211010 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.211020 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.211038 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.211056 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.314723 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.314819 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.314837 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.314864 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.314879 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.418266 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.418342 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.418357 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.418378 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.418396 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.520957 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.521355 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.521441 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.521543 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.521652 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.624441 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.624512 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.624539 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.624564 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.624577 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.727452 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.727518 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.727539 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.727566 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.727586 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.831452 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.831518 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.831539 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.831566 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.831586 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.927922 4605 scope.go:117] "RemoveContainer" containerID="516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.934685 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.934736 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.934749 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.934770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:26 crc kubenswrapper[4605]: I1001 13:45:26.934784 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:26Z","lastTransitionTime":"2025-10-01T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.038449 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.038905 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.039252 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.039735 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.039808 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.143157 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.143512 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.143529 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.143552 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.143567 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.248543 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.248974 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.249364 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.249477 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.249563 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.352828 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.352866 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.352877 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.352893 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.352903 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.456162 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.456530 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.456605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.456675 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.456750 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.559498 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.559550 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.559563 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.559584 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.559597 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.662266 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.662295 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.662304 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.662320 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.662329 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.765134 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.765192 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.765204 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.765224 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.765258 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.868051 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.868181 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.868195 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.868220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.868266 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.925881 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.925915 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.925982 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.926009 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:27 crc kubenswrapper[4605]: E1001 13:45:27.926285 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:27 crc kubenswrapper[4605]: E1001 13:45:27.926377 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:27 crc kubenswrapper[4605]: E1001 13:45:27.926458 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:27 crc kubenswrapper[4605]: E1001 13:45:27.926155 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.948823 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:27Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.966499 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:27Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.971044 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.971072 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.971083 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.971122 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.971131 4605 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:27Z","lastTransitionTime":"2025-10-01T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.979920 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:27Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:27 crc kubenswrapper[4605]: I1001 13:45:27.991083 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:27Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.009978 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.025411 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluste
r-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 
configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc 
kubenswrapper[4605]: I1001 13:45:28.038381 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.058003 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.073199 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.073368 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.073414 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.073424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.073446 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.073458 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.086943 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.099569 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.111390 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.125051 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.142117 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.166729 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.176336 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.176395 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.176421 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.176462 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.176480 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.189210 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.279241 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.279282 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.279290 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.279322 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.279333 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.383358 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.383483 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.383548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.383574 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.383676 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.393417 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/1.log"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.398973 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64"}
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.399719 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.424062 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.458520 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.480315 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.487135 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.487392 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.487563 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.487714 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.487852 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.506038 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.533860 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.551608 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.573268 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.590815 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.590870 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.590881 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.590903 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.590917 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.601239 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.622681 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.648623 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.671049 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.692970 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.694362 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.694412 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.694423 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.694443 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.694456 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.715530 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.731931 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.751010 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.768933 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:28Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.797703 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.797772 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.797807 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.797845 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.797860 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.901503 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.901559 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.901569 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.901584 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:28 crc kubenswrapper[4605]: I1001 13:45:28.901594 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:28Z","lastTransitionTime":"2025-10-01T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.004498 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.004585 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.004608 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.004638 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.004662 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.048167 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.060892 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.067124 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.087717 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.102835 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.114525 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.114642 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.114670 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.114748 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.114770 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.119834 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.140987 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.158371 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.173850 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.187331 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.204040 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.218052 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.218139 4605 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.218164 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.218187 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.218203 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.226650 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/k
ubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.254579 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417
dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed 
to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.273942 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.290370 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.308586 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.322293 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.322345 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.322362 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.322404 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.322422 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.326712 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.342066 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.405678 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/2.log" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.406921 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/1.log" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.410516 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64" exitCode=1 Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.410573 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" 
event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.410658 4605 scope.go:117] "RemoveContainer" containerID="516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.412586 4605 scope.go:117] "RemoveContainer" containerID="e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.413172 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.426199 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.426274 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.426311 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.426344 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.426368 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.440476 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.459436 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.479033 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.494236 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.507571 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.519907 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.520055 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.520172 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:45:45.520149862 +0000 UTC m=+68.264126070 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.524652 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.529194 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.529244 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.529258 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.529278 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.529293 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.543169 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.561633 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.586903 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417
dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://516a2c90123655832749bb1f379c3ac1800cbe59053cf56544424f8dabcee07a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:11Z\\\",\\\"message\\\":\\\"openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0076549ef \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: service-ca-operator,},ClusterIP:10.217.4.40,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.40],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1001 13:45:11.204670 5928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatu
ses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.601695 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.616647 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.633508 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.633548 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.633558 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.633573 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.633584 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.633659 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\
\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536b
ba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.655070 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.668171 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\
":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.686782 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.704357 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.716414 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:29Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.738037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.738076 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.738086 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.738118 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.738133 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.822520 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.822776 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:46:01.822733757 +0000 UTC m=+84.566709995 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.822906 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.822952 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.823001 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.823045 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823168 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823237 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not 
registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823288 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:46:01.823261511 +0000 UTC m=+84.567237759 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823359 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:46:01.823321003 +0000 UTC m=+84.567297341 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823388 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823416 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823437 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823432 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823499 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:46:01.823481157 +0000 UTC m=+84.567457405 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823515 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823572 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.823672 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:46:01.823646082 +0000 UTC m=+84.567622450 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.840705 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.840774 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.840793 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.840825 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.840846 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.926655 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.926779 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.926772 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.926670 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.926932 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.927109 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.927221 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:29 crc kubenswrapper[4605]: E1001 13:45:29.927406 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.943483 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.943513 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.943524 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.943541 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:29 crc kubenswrapper[4605]: I1001 13:45:29.943553 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:29Z","lastTransitionTime":"2025-10-01T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.047561 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.047606 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.047617 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.047635 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.047646 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.150668 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.150764 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.150792 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.150826 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.150849 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.253388 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.253422 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.253430 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.253446 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.253455 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.356459 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.356541 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.356569 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.356604 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.356627 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.421152 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/2.log" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.427434 4605 scope.go:117] "RemoveContainer" containerID="e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64" Oct 01 13:45:30 crc kubenswrapper[4605]: E1001 13:45:30.427726 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.449992 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.459893 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.459988 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.460438 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.460524 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.460964 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.471199 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.486632 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.506144 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.523808 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.547167 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442
849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.563602 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.565355 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.565403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.565419 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.565440 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.565456 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.580132 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63
a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.602872 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\
\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bb
a6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.614885 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.628908 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.651716 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.668875 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.668918 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.668928 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.668944 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.668954 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.670170 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.687448 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.703658 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.724602 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.748759 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:30Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.772336 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.772391 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.772403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.772419 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.772430 4605 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.875140 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.875228 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.875253 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.875287 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.875310 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.978264 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.978335 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.978352 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.978369 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:30 crc kubenswrapper[4605]: I1001 13:45:30.978381 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:30Z","lastTransitionTime":"2025-10-01T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.081636 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.081685 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.081696 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.081712 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.081722 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.184353 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.184648 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.184737 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.184876 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.184958 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.287737 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.287832 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.287855 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.287890 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.287910 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.391308 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.391712 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.391837 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.391913 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.392018 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.495580 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.495646 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.495658 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.495678 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.495691 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.598035 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.598116 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.598135 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.598166 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.598182 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.701252 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.701336 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.701354 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.701375 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.701389 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.804776 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.804831 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.804844 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.804861 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.804873 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.909023 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.909370 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.909385 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.909404 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.909421 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:31Z","lastTransitionTime":"2025-10-01T13:45:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.926711 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:31 crc kubenswrapper[4605]: E1001 13:45:31.926866 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.926928 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:31 crc kubenswrapper[4605]: E1001 13:45:31.927184 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.927264 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:31 crc kubenswrapper[4605]: E1001 13:45:31.927349 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:31 crc kubenswrapper[4605]: I1001 13:45:31.927803 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:31 crc kubenswrapper[4605]: E1001 13:45:31.928229 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.014028 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.014076 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.014108 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.014126 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.014140 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.116867 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.116914 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.116926 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.116946 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.116959 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.220722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.220779 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.220792 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.220811 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.220821 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.323726 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.323775 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.323786 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.323808 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.323829 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.428086 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.428168 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.428183 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.428208 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.428235 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.532202 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.532277 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.532295 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.532313 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.532326 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.635925 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.635984 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.636006 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.636036 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.636055 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.738861 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.738938 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.738955 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.738971 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.738990 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.841963 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.842048 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.842066 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.842432 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.842482 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.945908 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.945970 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.945983 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.946001 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:32 crc kubenswrapper[4605]: I1001 13:45:32.946015 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:32Z","lastTransitionTime":"2025-10-01T13:45:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.049724 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.049793 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.049812 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.049839 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.049859 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.152694 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.152745 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.152755 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.152776 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.152789 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.256640 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.256700 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.256714 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.256734 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.256754 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.359963 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.360029 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.360049 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.360076 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.360163 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.462950 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.463009 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.463019 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.463037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.463049 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.565371 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.565427 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.565440 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.565483 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.565497 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.668033 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.668108 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.668121 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.668141 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.668153 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.771442 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.771484 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.771497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.771516 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.771529 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.874139 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.874430 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.874474 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.874516 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.874547 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.926376 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.926543 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:33 crc kubenswrapper[4605]: E1001 13:45:33.926658 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.926692 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.926743 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:33 crc kubenswrapper[4605]: E1001 13:45:33.926957 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:33 crc kubenswrapper[4605]: E1001 13:45:33.926993 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:33 crc kubenswrapper[4605]: E1001 13:45:33.927041 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.977636 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.977673 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.977682 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.977697 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:33 crc kubenswrapper[4605]: I1001 13:45:33.977706 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:33Z","lastTransitionTime":"2025-10-01T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.080403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.080435 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.080445 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.080462 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.080476 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.182414 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.182457 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.182467 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.182483 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.182494 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.284653 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.284696 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.284708 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.284726 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.284738 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.387884 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.387932 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.387952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.387972 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.387985 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.490442 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.490499 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.490515 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.490542 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.490561 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.593714 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.593784 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.593803 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.593830 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.593849 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.696895 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.697230 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.697252 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.697272 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.697286 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.799945 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.799993 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.800007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.800029 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.800046 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.885213 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.885251 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.885259 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.885275 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.885286 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: E1001 13:45:34.909391 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:34Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.915935 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.916047 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.916075 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.916148 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.916188 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: E1001 13:45:34.936950 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:34Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.941246 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.941289 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.941305 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.941328 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.941345 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: E1001 13:45:34.961316 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:34Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.966280 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.966486 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.966647 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.966794 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.966922 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:34 crc kubenswrapper[4605]: E1001 13:45:34.988207 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:34Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.992510 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.992578 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.992592 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.992635 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:34 crc kubenswrapper[4605]: I1001 13:45:34.992651 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:34Z","lastTransitionTime":"2025-10-01T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: E1001 13:45:35.004978 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:35Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:35 crc kubenswrapper[4605]: E1001 13:45:35.005115 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.007465 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.007507 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.007522 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.007545 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.007562 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.110184 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.110224 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.110236 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.110253 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.110264 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.212702 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.212738 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.212753 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.212770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.212780 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.316715 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.316806 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.316824 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.316878 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.316897 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.420327 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.420379 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.420394 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.420415 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.420429 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.524050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.524393 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.524497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.524592 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.525225 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.630212 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.630255 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.630270 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.630292 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.630308 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.733157 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.733619 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.733706 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.733787 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.733860 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.837691 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.838079 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.838195 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.838282 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.838360 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.926300 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:35 crc kubenswrapper[4605]: E1001 13:45:35.926857 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.926468 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:35 crc kubenswrapper[4605]: E1001 13:45:35.927164 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.926588 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.926428 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:35 crc kubenswrapper[4605]: E1001 13:45:35.927559 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:35 crc kubenswrapper[4605]: E1001 13:45:35.927645 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.941235 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.941279 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.941290 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.941316 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:35 crc kubenswrapper[4605]: I1001 13:45:35.941328 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:35Z","lastTransitionTime":"2025-10-01T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.044485 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.044534 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.044546 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.044565 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.044579 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.147495 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.147543 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.147559 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.147577 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.147590 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.251655 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.251736 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.251760 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.251791 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.251814 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.355545 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.355954 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.356062 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.356196 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.356279 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.458894 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.458930 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.458939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.458952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.458964 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.561619 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.561881 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.561985 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.562049 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.562207 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.664912 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.664950 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.664959 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.664974 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.664984 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.768133 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.768200 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.768219 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.768245 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.768263 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.871745 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.872297 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.872447 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.872587 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.872721 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.975848 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.975913 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.975928 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.975952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:36 crc kubenswrapper[4605]: I1001 13:45:36.975968 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:36Z","lastTransitionTime":"2025-10-01T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.078708 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.078767 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.078780 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.078802 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.078817 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.184200 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.185120 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.185223 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.185303 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.185433 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.288833 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.289485 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.289534 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.289555 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.289567 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.392796 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.392909 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.392930 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.392963 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.392984 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.496918 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.496969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.496990 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.497009 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.497029 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.600409 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.600484 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.600507 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.600535 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.600555 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.703669 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.703722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.703735 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.703753 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.703765 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.806264 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.806301 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.806311 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.806328 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.806341 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.908411 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.908450 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.908461 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.908475 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.908484 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:37Z","lastTransitionTime":"2025-10-01T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.926195 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.926247 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:45:37 crc kubenswrapper[4605]: E1001 13:45:37.926307 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.926199 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:45:37 crc kubenswrapper[4605]: E1001 13:45:37.926442 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc"
Oct 01 13:45:37 crc kubenswrapper[4605]: E1001 13:45:37.926506 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.926568 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:45:37 crc kubenswrapper[4605]: E1001 13:45:37.926776 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.948002 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:37Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.967730 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:37Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.977962 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:37Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:37 crc kubenswrapper[4605]: I1001 13:45:37.987147 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:37Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.002913 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:37Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.010644 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.010692 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.010702 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.010728 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.010739 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.022749 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.054329 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.072402 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.091064 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.103134 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.113682 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.113737 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.113750 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.113770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.113783 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.115801 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.125907 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.137423 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.147044 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.160501 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"
cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.173252 
4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:
59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.189730 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc
/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy 
controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:38Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.215496 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.215546 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.215559 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.215577 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.215591 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.318597 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.318646 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.318658 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.318676 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.318690 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.422424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.422534 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.422554 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.422577 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.422595 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.525763 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.525803 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.525850 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.525866 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.525876 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.628656 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.628701 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.628715 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.628733 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.628747 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.731948 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.732001 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.732012 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.732029 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.732042 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.835707 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.835762 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.835775 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.835788 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.835797 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.939214 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.939250 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.939261 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.939278 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:38 crc kubenswrapper[4605]: I1001 13:45:38.939290 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:38Z","lastTransitionTime":"2025-10-01T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.042660 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.042730 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.042741 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.042775 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.042786 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.145460 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.145522 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.145536 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.145556 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.145570 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.248000 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.248081 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.248108 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.248130 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.248144 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.352030 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.352080 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.352124 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.352146 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.352158 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.455810 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.455860 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.455872 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.455891 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.455905 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.559302 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.559351 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.559363 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.559381 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.559395 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.663153 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.663273 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.663299 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.663336 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.663361 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.766640 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.766700 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.766713 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.766735 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.766749 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.869945 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.870007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.870016 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.870034 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.870047 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.926288 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.926402 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.926514 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:39 crc kubenswrapper[4605]: E1001 13:45:39.926644 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:39 crc kubenswrapper[4605]: E1001 13:45:39.926459 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:39 crc kubenswrapper[4605]: E1001 13:45:39.926792 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.926858 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:39 crc kubenswrapper[4605]: E1001 13:45:39.926906 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.973201 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.973594 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.973727 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.973846 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:39 crc kubenswrapper[4605]: I1001 13:45:39.973969 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:39Z","lastTransitionTime":"2025-10-01T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.076649 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.077019 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.077119 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.077192 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.077257 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.180755 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.180812 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.180823 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.180841 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.180853 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.283893 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.283942 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.283952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.283967 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.283988 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.389752 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.389844 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.389861 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.389893 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.389907 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.493362 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.493418 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.493431 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.493454 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.493469 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.596907 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.596969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.596980 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.596996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.597007 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.700297 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.700351 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.700367 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.700390 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.700409 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.803617 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.803697 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.803726 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.803765 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.803791 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.907059 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.907169 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.907194 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.907231 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:40 crc kubenswrapper[4605]: I1001 13:45:40.907259 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:40Z","lastTransitionTime":"2025-10-01T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.010424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.010514 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.010588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.010639 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.010662 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.114251 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.114373 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.114386 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.114403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.114414 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.218384 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.218436 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.218446 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.218464 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.218475 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.320877 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.320921 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.320929 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.320943 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.320952 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.424544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.424605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.424623 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.424650 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.424668 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.527194 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.527258 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.527278 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.527304 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.527323 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.629807 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.629928 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.629957 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.629995 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.630018 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.733036 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.733154 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.733181 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.733220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.733246 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.838900 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.838994 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.839007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.839053 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.839076 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.926231 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:45:41 crc kubenswrapper[4605]: E1001 13:45:41.926489 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.926963 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:45:41 crc kubenswrapper[4605]: E1001 13:45:41.927150 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.927465 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:45:41 crc kubenswrapper[4605]: E1001 13:45:41.927602 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.928081 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:45:41 crc kubenswrapper[4605]: E1001 13:45:41.928237 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.942904 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.943197 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.943217 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.943247 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:41 crc kubenswrapper[4605]: I1001 13:45:41.943270 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:41Z","lastTransitionTime":"2025-10-01T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.047233 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.047482 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.047583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.047694 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.047790 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.151565 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.151607 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.151616 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.151629 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.151639 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.254216 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.254273 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.254288 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.254303 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.254315 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.357519 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.357588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.357605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.357634 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.357648 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.460635 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.460698 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.460715 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.460738 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.460758 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.563642 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.563690 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.563702 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.563722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.563736 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.667234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.667287 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.667302 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.667321 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.667334 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.770453 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.770507 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.770518 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.770536 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.770546 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.872771 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.872834 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.872851 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.872874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.872891 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.975665 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.975722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.975736 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.975757 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:42 crc kubenswrapper[4605]: I1001 13:45:42.975771 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:42Z","lastTransitionTime":"2025-10-01T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.078261 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.078314 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.078326 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.078345 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.078358 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.180962 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.181019 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.181030 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.181045 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.181059 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.285129 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.285178 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.285190 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.285207 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.285220 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.388631 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.388663 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.388683 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.388698 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.388708 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.491305 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.491588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.491714 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.491815 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.491892 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.594544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.594583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.594592 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.594606 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.594615 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.696552 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.696587 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.696598 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.696613 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.696624 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.799505 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.799558 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.799568 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.799589 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.799601 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.902808 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.902898 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.902954 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.902981 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.903001 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:43Z","lastTransitionTime":"2025-10-01T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.925931 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:45:43 crc kubenswrapper[4605]: E1001 13:45:43.926114 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.926299 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:45:43 crc kubenswrapper[4605]: E1001 13:45:43.926353 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.926467 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:45:43 crc kubenswrapper[4605]: E1001 13:45:43.926516 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:45:43 crc kubenswrapper[4605]: I1001 13:45:43.926736 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:45:43 crc kubenswrapper[4605]: E1001 13:45:43.926986 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.004921 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.004959 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.004970 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.004983 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.004993 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.107961 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.108004 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.108014 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.108033 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.108044 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.211020 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.211436 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.211526 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.211607 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.211684 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.313785 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.313835 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.313846 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.313861 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.313873 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.416319 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.416387 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.416399 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.416420 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.416436 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.519544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.519583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.519592 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.519605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.519614 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.622365 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.622421 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.622432 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.622451 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.622501 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.725159 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.725511 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.725596 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.725670 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.725743 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.828145 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.828190 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.828201 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.828220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.828232 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.927450 4605 scope.go:117] "RemoveContainer" containerID="e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64"
Oct 01 13:45:44 crc kubenswrapper[4605]: E1001 13:45:44.927803 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.930950 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.931000 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.931012 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.931031 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:44 crc kubenswrapper[4605]: I1001 13:45:44.931046 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:44Z","lastTransitionTime":"2025-10-01T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.033301 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.033335 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.033344 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.033357 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.033367 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.136600 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.136633 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.136643 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.136657 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.136668 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.238977 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.239024 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.239035 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.239052 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.239062 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.331213 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.331276 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.331300 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.331323 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.331337 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.344913 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:45Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.349189 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.349447 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.349524 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.349608 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.349678 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.364972 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:45Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.368633 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.368675 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.368694 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.368716 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.368733 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.382471 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:45Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.386902 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.386978 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.386996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.387019 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.387037 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.401578 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:45Z is after 2025-08-24T17:21:41Z"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.405573 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.405599 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.405609 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.405624 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.405635 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.417316 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:45Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.417442 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.418994 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.419025 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.419038 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.419060 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.419072 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.522286 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.522328 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.522339 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.522358 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.522373 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.591306 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.591443 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.591505 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:46:17.59148992 +0000 UTC m=+100.335466128 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.625622 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.625663 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.625674 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.625691 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.625701 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.728945 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.728982 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.728990 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.729004 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.729016 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.832489 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.832561 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.832579 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.832605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.832626 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.926338 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.926437 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.926485 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.926527 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.926532 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.926623 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.926699 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:45 crc kubenswrapper[4605]: E1001 13:45:45.926760 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.934974 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.935005 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.935014 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.935025 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:45 crc kubenswrapper[4605]: I1001 13:45:45.935036 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:45Z","lastTransitionTime":"2025-10-01T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.041393 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.041460 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.041475 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.041491 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.041499 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.144811 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.144847 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.144856 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.144869 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.144882 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.247847 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.248002 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.248045 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.248065 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.248080 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.351387 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.351420 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.351428 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.351442 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.351453 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.453849 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.453913 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.453925 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.453947 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.453961 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.557821 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.557869 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.557881 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.557900 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.557914 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.660435 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.660530 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.660544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.660562 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.660573 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.762927 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.762969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.762981 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.762998 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.763010 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.865821 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.865882 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.865895 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.865912 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.865924 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.968691 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.968719 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.968729 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.968743 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:46 crc kubenswrapper[4605]: I1001 13:45:46.968752 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:46Z","lastTransitionTime":"2025-10-01T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.071439 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.071473 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.071491 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.071505 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.071514 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.173751 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.173793 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.173803 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.173816 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.173826 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.277149 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.277201 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.277213 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.277229 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.277241 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.381137 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.381759 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.381793 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.381814 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.381829 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.484258 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.484313 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.484325 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.484344 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.484355 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.492299 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/0.log" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.492352 4605 generic.go:334] "Generic (PLEG): container finished" podID="1c2ca71f-4cb0-4852-927d-af69be5d77f2" containerID="1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd" exitCode=1 Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.492386 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerDied","Data":"1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.492786 4605 scope.go:117] "RemoveContainer" containerID="1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.509599 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.523724 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.542659 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.554588 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.568787 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.583651 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.592331 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.592386 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.592401 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.592424 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.592443 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.606176 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\
\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536b
ba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.619734 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to 
/host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.641241 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.658869 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.670207 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.680817 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.691553 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.695331 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.695377 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.695387 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.695405 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.695418 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.705824 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.717678 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.733724 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.746607 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.797518 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.797551 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.797559 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.797572 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 
13:45:47.797581 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.901020 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.901075 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.901085 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.901136 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.901147 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:47Z","lastTransitionTime":"2025-10-01T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.927386 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:47 crc kubenswrapper[4605]: E1001 13:45:47.927695 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.927470 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:47 crc kubenswrapper[4605]: E1001 13:45:47.927978 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.927496 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:47 crc kubenswrapper[4605]: E1001 13:45:47.928241 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.927412 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:47 crc kubenswrapper[4605]: E1001 13:45:47.928578 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.940629 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secr
ets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.953770 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.966081 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.978006 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:47 crc kubenswrapper[4605]: I1001 13:45:47.988236 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:47Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.003193 4605 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.003228 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.003237 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.003270 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.003282 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.006045 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.021929 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to /host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.047604 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.060014 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.072321 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.086159 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.096235 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.105817 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.105849 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.105857 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.105870 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.105881 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.108250 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.122778 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.138803 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.151158 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.164075 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.208146 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.208181 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.208200 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.208221 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 
13:45:48.208232 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.310770 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.310810 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.310819 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.310831 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.310841 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.414213 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.414253 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.414261 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.414275 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.414285 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.498003 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/0.log" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.498076 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerStarted","Data":"2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.515265 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-
binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7a
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.516873 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.516933 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.516945 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.516959 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.516968 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.527754 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully 
moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to /host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.548263 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.560308 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.571480 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.583037 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.592509 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.602605 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.613740 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.619357 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.619390 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.619399 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.619414 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.619426 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.625723 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.636845 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.646448 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.656380 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.667380 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.679850 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.691864 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.706205 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:48Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.721969 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.721997 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.722024 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.722045 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.722057 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.824500 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.824553 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.824561 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.824609 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.824633 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.926763 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.926806 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.926837 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.926856 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:48 crc kubenswrapper[4605]: I1001 13:45:48.926868 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:48Z","lastTransitionTime":"2025-10-01T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.029324 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.029354 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.029362 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.029375 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.029384 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.131863 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.132072 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.132112 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.132135 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.132150 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.234152 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.234186 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.234196 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.234210 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.234219 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.336216 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.336243 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.336251 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.336264 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.336273 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.438353 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.438388 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.438398 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.438415 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.438428 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.540488 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.540541 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.540559 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.540578 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.540593 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.643166 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.643227 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.643237 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.643254 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.643264 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.747107 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.747477 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.747542 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.747632 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.747699 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.850431 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.850466 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.850476 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.850492 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.850502 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.926394 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.926461 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.926425 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.926394 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:49 crc kubenswrapper[4605]: E1001 13:45:49.926661 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:49 crc kubenswrapper[4605]: E1001 13:45:49.926800 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:49 crc kubenswrapper[4605]: E1001 13:45:49.926904 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:49 crc kubenswrapper[4605]: E1001 13:45:49.926956 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.953402 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.953468 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.953486 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.953509 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:49 crc kubenswrapper[4605]: I1001 13:45:49.953529 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:49Z","lastTransitionTime":"2025-10-01T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.057535 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.057599 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.057612 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.057632 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.057650 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.160876 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.160939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.160952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.160968 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.160979 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.263849 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.263923 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.263961 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.263979 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.263992 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.366637 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.366685 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.366696 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.366714 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.366729 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.469218 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.470013 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.470173 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.470311 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.470406 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.573971 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.574016 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.574027 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.574043 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.574056 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.677331 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.677371 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.677383 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.677402 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.677415 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.780815 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.780875 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.780885 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.780903 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.780936 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.884055 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.884089 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.884116 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.884130 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.884140 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.987561 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.987617 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.987631 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.987651 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:50 crc kubenswrapper[4605]: I1001 13:45:50.987663 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:50Z","lastTransitionTime":"2025-10-01T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.090294 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.090603 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.090688 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.090785 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.090853 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.193156 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.193202 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.193212 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.193228 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.193243 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.296142 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.296194 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.296208 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.296229 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.296241 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.399168 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.399235 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.399248 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.399270 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.399284 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.502050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.502754 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.502831 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.502905 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.502967 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.605671 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.605744 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.605757 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.605775 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.605814 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.708536 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.708580 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.708592 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.708613 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.708626 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.811545 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.811588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.811597 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.811611 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.811623 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.914038 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.914085 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.914108 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.914120 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.914129 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:51Z","lastTransitionTime":"2025-10-01T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.926288 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:51 crc kubenswrapper[4605]: E1001 13:45:51.926390 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.926399 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.926468 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:51 crc kubenswrapper[4605]: I1001 13:45:51.926481 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:51 crc kubenswrapper[4605]: E1001 13:45:51.926593 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:51 crc kubenswrapper[4605]: E1001 13:45:51.926711 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:51 crc kubenswrapper[4605]: E1001 13:45:51.926833 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.016803 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.016831 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.016839 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.016852 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.016861 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.120067 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.120154 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.120165 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.120182 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.120209 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.222730 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.222766 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.222777 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.222791 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.222802 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.324854 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.324899 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.324911 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.324927 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.324941 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.427860 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.427894 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.427905 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.427920 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.427932 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.531339 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.531377 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.531386 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.531400 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.531411 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.634233 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.634299 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.634318 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.634346 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.634365 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.737165 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.737211 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.737222 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.737238 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.737247 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.840470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.840801 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.840890 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.840991 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.841144 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.944140 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.944189 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.944199 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.944214 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:52 crc kubenswrapper[4605]: I1001 13:45:52.944225 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:52Z","lastTransitionTime":"2025-10-01T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.047179 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.047251 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.047279 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.047314 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.047338 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.149780 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.149833 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.149848 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.149866 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.149878 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.251964 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.252014 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.252024 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.252040 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.252052 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.355524 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.355626 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.355648 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.355677 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.355827 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.459143 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.459188 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.459199 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.459214 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.459224 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.562112 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.562159 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.562175 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.562193 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.562207 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.665616 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.665673 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.665685 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.665703 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.665719 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.768017 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.768059 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.768068 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.768082 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.768109 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.870613 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.870647 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.870657 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.870670 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.870681 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.926275 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.926315 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.926283 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:53 crc kubenswrapper[4605]: E1001 13:45:53.926390 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.926273 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:53 crc kubenswrapper[4605]: E1001 13:45:53.926473 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:53 crc kubenswrapper[4605]: E1001 13:45:53.926515 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:53 crc kubenswrapper[4605]: E1001 13:45:53.926553 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.972962 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.973007 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.973020 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.973037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:53 crc kubenswrapper[4605]: I1001 13:45:53.973052 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:53Z","lastTransitionTime":"2025-10-01T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.075102 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.075146 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.075156 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.075169 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.075179 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.177774 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.177846 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.177857 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.177871 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.177883 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.280652 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.280722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.280740 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.280765 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.280783 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.383658 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.383743 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.383756 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.383774 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.383793 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.486923 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.486989 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.487006 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.487030 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.487049 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.589903 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.589947 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.589956 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.589974 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.589984 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.692859 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.692906 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.692918 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.692936 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.692948 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.796040 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.796124 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.796143 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.796164 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.796179 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.899089 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.899201 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.899220 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.899245 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:54 crc kubenswrapper[4605]: I1001 13:45:54.899265 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:54Z","lastTransitionTime":"2025-10-01T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.002360 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.002751 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.002822 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.003195 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.003298 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.105877 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.105929 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.105945 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.105970 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.105986 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.208848 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.208894 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.208904 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.208918 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.208929 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.311811 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.311867 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.311880 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.311898 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.311912 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.415740 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.415824 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.415852 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.415885 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.415908 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.480936 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.481050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.481209 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.481245 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.481284 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.497761 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:55Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.502290 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.502357 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.502372 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.502394 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.502413 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.517861 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:55Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.522868 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.522908 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.522919 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.522934 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.522945 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.538360 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:55Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.542895 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.542984 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.543004 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.543032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.543054 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.558178 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:55Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.561939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.561988 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.562003 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.562025 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.562038 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.575643 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:55Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.575813 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.577718 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.577756 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.577769 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.577829 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.577846 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.679544 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.679600 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.679625 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.679649 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.679666 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.782804 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.782846 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.782861 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.782884 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.782903 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.885646 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.885687 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.885703 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.885723 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.885738 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.926630 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.926720 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.926923 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.926632 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.927174 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.927406 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.927508 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:55 crc kubenswrapper[4605]: E1001 13:45:55.926857 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.989943 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.989992 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.990008 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.990032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:55 crc kubenswrapper[4605]: I1001 13:45:55.990050 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:55Z","lastTransitionTime":"2025-10-01T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.093787 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.093857 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.093867 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.093886 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.093900 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.196444 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.196507 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.196527 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.196554 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.196574 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.299541 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.299622 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.299645 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.299675 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.299694 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.402834 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.402906 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.402925 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.402950 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.402972 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.506578 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.506699 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.506719 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.506802 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.507370 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.610892 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.610957 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.610974 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.610992 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.611010 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.713996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.714032 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.714042 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.714059 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.714070 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.816996 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.817073 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.817124 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.817157 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.817182 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.946239 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.946310 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.946335 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.946362 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:56 crc kubenswrapper[4605]: I1001 13:45:56.946385 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:56Z","lastTransitionTime":"2025-10-01T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.050482 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.050546 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.050568 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.050598 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.050621 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.155526 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.155603 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.155626 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.155661 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.155682 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.259309 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.259383 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.259406 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.259432 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.259466 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.363075 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.363138 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.363150 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.363163 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.363172 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.465923 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.466385 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.466525 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.466667 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.466793 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.570940 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.571004 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.571018 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.571039 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.571055 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.674080 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.674152 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.674165 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.674182 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.674194 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.777388 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.778452 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.778620 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.778772 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.778922 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.883408 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.884048 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.884290 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.884465 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.884610 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.927408 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.927500 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:57 crc kubenswrapper[4605]: E1001 13:45:57.927672 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.927698 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:57 crc kubenswrapper[4605]: E1001 13:45:57.928078 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:57 crc kubenswrapper[4605]: E1001 13:45:57.928406 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.928429 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.930278 4605 scope.go:117] "RemoveContainer" containerID="e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64" Oct 01 13:45:57 crc kubenswrapper[4605]: E1001 13:45:57.930797 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.955020 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:57Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.981081 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:57Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.987885 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.987931 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.987951 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.987976 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.987997 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:57Z","lastTransitionTime":"2025-10-01T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:57 crc kubenswrapper[4605]: I1001 13:45:57.998617 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:57Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.013020 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.038639 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.057668 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.071991 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.088621 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.091448 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.091485 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.091497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.091514 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.091526 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.103674 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.118207 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.136547 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.148838 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.160509 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.176842 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.191785 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to /host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.194232 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.194270 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.194285 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.194306 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.194319 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.215435 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.235133 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z" Oct 01 
13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.297401 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.297485 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.297528 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.297609 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.297647 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.400533 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.400577 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.400587 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.400604 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.400617 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.504206 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.504765 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.504778 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.504797 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.504811 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.536385 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/2.log" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.538931 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.607738 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.607781 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.607792 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.607808 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.607820 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.711177 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.711215 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.711236 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.711257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.711287 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.814364 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.814415 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.814427 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.814445 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.814458 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.917479 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.917539 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.917554 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.917594 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:58 crc kubenswrapper[4605]: I1001 13:45:58.917607 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:58Z","lastTransitionTime":"2025-10-01T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.020983 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.021049 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.021067 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.021119 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.021144 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.125492 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.125560 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.125597 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.125624 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.125654 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.228489 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.228531 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.228545 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.228568 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.228581 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.331748 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.331804 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.331816 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.331836 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.331853 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.434749 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.434829 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.434863 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.434895 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.434917 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.539053 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.539152 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.539199 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.539228 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.539251 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.543366 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.560301 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.581470 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.598174 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.616652 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.634709 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.641849 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.641876 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.641922 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.641940 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.641951 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.648692 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.660480 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.671857 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.683409 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.698624 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.711992 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to /host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.733282 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.743830 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.743877 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.743887 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.743906 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.743918 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.748950 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.767491 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.781852 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.795571 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.809485 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:59Z is after 2025-08-24T17:21:41Z" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.847145 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.847197 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.847210 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.847235 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.847247 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.926292 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.926386 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.926320 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:45:59 crc kubenswrapper[4605]: E1001 13:45:59.926584 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.926632 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:45:59 crc kubenswrapper[4605]: E1001 13:45:59.926819 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:45:59 crc kubenswrapper[4605]: E1001 13:45:59.927052 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:45:59 crc kubenswrapper[4605]: E1001 13:45:59.927233 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.950938 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.951003 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.951029 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.951067 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:45:59 crc kubenswrapper[4605]: I1001 13:45:59.951129 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:45:59Z","lastTransitionTime":"2025-10-01T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.054608 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.054685 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.054702 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.054759 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.054775 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.159911 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.159966 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.159984 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.160010 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.160030 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.263244 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.263278 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.263288 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.263303 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.263312 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.366591 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.366632 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.366646 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.366669 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.366683 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.470159 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.470216 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.470234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.470253 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.470268 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.554409 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/3.log" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.559588 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/2.log" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.565584 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" exitCode=1 Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.565635 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.565679 4605 scope.go:117] "RemoveContainer" containerID="e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.566428 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:46:00 crc kubenswrapper[4605]: E1001 13:46:00.566614 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.576030 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.576064 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.576074 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.576087 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.576114 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.588432 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.600923 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.615167 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.628910 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.641846 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.660906 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.677214 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to /host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.678911 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.678974 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.678991 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.679017 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.679034 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.702114 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e41ab9ad83951f275a359365c118fdba9ea91417dc52a9cd98b0a55f84788c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:28Z\\\",\\\"message\\\":\\\"onal-cni-plugins-xclfn\\\\nI1001 13:45:28.238175 6140 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1001 13:45:28.238172 6140 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1001 13:45:28.238207 6140 services_controller.go:443] Built service default/kubernetes LB cluster-wide configs for network=default: []services.lbConfig(nil)\\\\nI1001 13:45:28.238215 6140 services_controller.go:444] Built service default/kubernetes LB per-node configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.1\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:6443, V4IPs:[]string{\\\\\\\"192.168.126.11\\\\\\\"}, V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1001 13:45:28.238228 6140 services_controller.go:445] Built service default/kubernetes LB template configs for network=default: []services.lbConfig(nil)\\\\nF1001 13:45:28.238233 6140 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:59Z\\\",\\\"message\\\":\\\"penshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2 in node crc\\\\nI1001 13:45:58.951592 6493 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc\\\\nF1001 13:45:58.951380 6493 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z]\\\\nI1001 13:45:58.951603 6493 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-m7ph7\\\\nI1001 13:45:58.951606 6493 ovn.go:134] Ensuring zone local for Pod openshift-kube-scheduler/openshift-kube-scheduler-crc in node crc\\\\nI1001 13:45:58.951612 6493 obj_retry.go:365] 
Adding\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.715275 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.727235 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.738695 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.749444 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.760606 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.775582 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.781381 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.781407 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.781419 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.781433 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.781445 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.792131 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.804355 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.816098 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:00Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.884055 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.884087 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.884123 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.884142 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 
13:46:00.884151 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.987404 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.987474 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.987492 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.987515 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:00 crc kubenswrapper[4605]: I1001 13:46:00.987532 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:00Z","lastTransitionTime":"2025-10-01T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.089987 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.090051 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.090067 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.090120 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.090139 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.194074 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.194160 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.194172 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.194190 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.194204 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.296351 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.296420 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.296430 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.296451 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.296465 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.400270 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.400334 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.400352 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.400378 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.400403 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.503569 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.503642 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.503661 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.503699 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.503723 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.573466 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/3.log" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.580684 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.580932 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.603212 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c3d55cda00f6c5763662b3f96b4c36f1fb8c220fcd14ff3469b565deac718a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e6578a529a458eab6242d8f667520303c65bd53ceba7598d4c9680c7a93bace\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.607504 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.607558 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.607578 4605 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.607603 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.607622 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.621695 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.643936 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.665983 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k5krq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-m7ph7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.685370 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3023060-c8ae-492b-b1cb-a418d9a8e59f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd1361d1d9cb03a3942918266a1e85d3e370eabdfa7b7b1e40971995928187b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4cqjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdjh7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.706812 4605 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-xclfn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9fc4aae-03cb-458d-83cb-1a3ab9fa9639\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f8eab67511426f65e14781420480f85ea6b0d9ab25f6846a68820a1ded0053f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://14d2a7aa9a0027ef28b5ec7b288ce0a5367c53f616ed3e75216d07c9809a86a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abe21aad2433d34c4a769e87e28f3c479511a4580df607d1496ed33c85385c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b870108ecd4816a8d0d3d32f4b2384246c0b4537639e9cd04d660fea3246244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b96314b06953b56383a86305cf1702c5f02add621d7981a36444d2d3998cf0f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24f0381694561100e524cc380f1a2c6050c7192f27ec8f27ad2e41abce969c1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfb30493a0c650f9536bba6417da0a582b1b23b6488223359856c12d1024d57f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tcms8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-xclfn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.711960 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.712007 4605 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.712019 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.712038 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.712052 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.725227 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wgx5p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c2ca71f-4cb0-4852-927d-af69be5d77f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:47Z\\\",\\\"message\\\":\\\"2025-10-01T13:45:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a\\\\n2025-10-01T13:45:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f8cd6dc3-6a91-4298-bd9c-6dedaf40e61a to /host/opt/cni/bin/\\\\n2025-10-01T13:45:01Z [verbose] multus-daemon started\\\\n2025-10-01T13:45:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T13:45:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7gtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wgx5p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.752576 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T13:45:59Z\\\",\\\"message\\\":\\\"penshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2 in node crc\\\\nI1001 13:45:58.951592 6493 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-scheduler/openshift-kube-scheduler-crc\\\\nF1001 13:45:58.951380 6493 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:45:58Z is after 2025-08-24T17:21:41Z]\\\\nI1001 13:45:58.951603 6493 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-m7ph7\\\\nI1001 13:45:58.951606 6493 ovn.go:134] Ensuring zone local for Pod openshift-kube-scheduler/openshift-kube-scheduler-crc in node crc\\\\nI1001 13:45:58.951612 6493 obj_retry.go:365] Adding\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmn8q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-kzv4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.771709 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a6400c9-7945-44a6-b37d-e94811fc9754\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c32729bcc338f810b05ad3ac0cd10feeeaff031cd81540b79fec759bbc2b419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51a4fc25d3fb27e7ea7fba17367d9a0b3fefa388d306e3589ec13e3cf6e61452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2jrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-gqbr2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.791197 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d3d0d59e6a31efc844c56e1ad43cd326a7b2f1844784f2814469e36394cf377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.813310 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e2eda677e75448f2e7e3fd477052f3a596e6c11d745848becc6c22f133e6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.815366 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.815430 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.815448 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.815476 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.815494 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.832953 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-6zb6l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83630902-b99b-4944-81a4-487e9584e0c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b0762e1125229327b00202fd05bc17fd641b76f2421e20d0672b3e2d3b0f7538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jt4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:59Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-6zb6l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.847592 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-jvqzn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4d5a988-e2c8-47db-b738-cb43467b1bfb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17326e690c100180a8e3d1a0180522378ef995bd1f5cba52d6f147a958351118\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:45:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-985lh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:45:07Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-jvqzn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.851762 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.851950 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.851992 4605 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.851964164 +0000 UTC m=+148.595940382 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.852055 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852084 4605 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.852132 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852176 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.85216031 +0000 UTC m=+148.596136528 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.852207 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852283 4605 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852365 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852390 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852412 4605 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852411 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.852390356 +0000 UTC m=+148.596366564 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852304 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852468 4605 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852480 4605 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852540 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.85252713 +0000 UTC m=+148.596503348 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.852559 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.85255094 +0000 UTC m=+148.596527158 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.862801 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eda516c-c3a2-4e46-b9c2-b603ebc2d618\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c15c835868f9e04648db64fcf271a4c212d406e7bcc7cfca15167b853a02a70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d0dbfcc18f63ddc136fc049c023f326cb882adbcf1123f1ec5d4b884ae3970a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed56fd63f436435206d3574740efba75e8a94582fe128388e03b398131acd4ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80bd23f5be42dc5714bba2b0f742b8aa9fdd60540db1048054e76c00f356b240\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83498000b4c2e2ea1b498069626fa6916e9fc5e95a3ecf0136b2cb0ca8a409b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1759326292\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1759326291\\\\\\\\\\\\\\\" (2025-10-01 12:44:51 +0000 UTC to 2026-10-01 12:44:51 +0000 UTC (now=2025-10-01 13:44:57.347830124 +0000 UTC))\\\\\\\"\\\\nI1001 13:44:57.347872 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1001 13:44:57.347895 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1001 13:44:57.347920 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347949 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1001 13:44:57.347989 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4229714179/tls.crt::/tmp/serving-cert-4229714179/tls.key\\\\\\\"\\\\nI1001 13:44:57.348126 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348139 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1001 13:44:57.348152 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1001 13:44:57.348158 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1001 13:44:57.348154 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1001 13:44:57.348212 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1001 13:44:57.348222 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nF1001 13:44:57.350577 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:41Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://113f3599c725570c4484ed005921c30a43db5ffec24b72907c6c7546453fe363\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd5797f6792f7a4b792ca57dfda77899c3bcd3e8f48ed85a88efe8be128d891\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.878687 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ca1d91d-0902-4d3a-b66a-a556b5009d8b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acf6d9c6b834cf378303c7ee6e1af3f3cde2502d8f28a6e5d3ec33deb69434b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4200a3723d31af3d800fca144949b047d3ef2960d856f286899351523593061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://671dc002b0ca1a50b36373cbf0a8971b0f751989c9f19acedb524b09afd53517\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ab08a748b9c3040ea1af963f8ebeef630d7fb260122baba05229615424850d6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.893606 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e761e16e-22e8-4656-94dc-0911ae6f6f05\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:45:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c3d4d08023677ab885a54e3844be97affb4cf2b7629495bd3ff5c2ca2ea572e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5327594e9731b96a498f5d9b0c5011693e2c7ef4af097649f9a4fce12aaf24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e397c3307f660a4316c953e5b4a6440a8fd584b7d06b514833a2514c99f5d22b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T13:44:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2705745b03d5e0b8fdb38b0828944ce25553b45328d0ffac4762c3007c37a376\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T13:44:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T13:44:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T13:44:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.908780 4605 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:01Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.918047 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.918144 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.918162 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.918185 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.918202 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:01Z","lastTransitionTime":"2025-10-01T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.926439 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.926465 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.926450 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.926569 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.926586 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.926697 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.926794 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:01 crc kubenswrapper[4605]: E1001 13:46:01.927056 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:01 crc kubenswrapper[4605]: I1001 13:46:01.942461 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.021919 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.021989 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.022008 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.022037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.022057 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.124795 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.124858 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.124878 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.124906 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.124925 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.228422 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.228501 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.228519 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.228547 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.228570 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.331965 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.332415 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.332559 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.332704 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.332901 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.435793 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.435833 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.435841 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.435858 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.435867 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.538751 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.538792 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.538801 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.538818 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.538828 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.642048 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.642112 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.642125 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.642145 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.642159 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.747435 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.747493 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.747506 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.747553 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.747564 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.850856 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.850939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.850984 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.851013 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.851032 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.955134 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.955186 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.955216 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.955244 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:02 crc kubenswrapper[4605]: I1001 13:46:02.955262 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:02Z","lastTransitionTime":"2025-10-01T13:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.058383 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.058449 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.058470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.058493 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.058507 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.161087 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.161171 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.161191 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.161216 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.161236 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.263851 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.263909 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.263932 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.263956 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.263971 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.367036 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.367081 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.367095 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.367139 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.367151 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.470082 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.470143 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.470152 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.470166 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.470178 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.573403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.573475 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.573501 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.573627 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.573655 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.676524 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.676561 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.676570 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.676583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.676592 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.779675 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.779929 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.779952 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.780608 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.780666 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.885371 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.885426 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.885439 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.885459 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.885472 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.926387 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.926462 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.926548 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:03 crc kubenswrapper[4605]: E1001 13:46:03.926696 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.926753 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:03 crc kubenswrapper[4605]: E1001 13:46:03.926921 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:03 crc kubenswrapper[4605]: E1001 13:46:03.926962 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:03 crc kubenswrapper[4605]: E1001 13:46:03.927247 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.989211 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.989742 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.989760 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.989784 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:03 crc kubenswrapper[4605]: I1001 13:46:03.989801 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:03Z","lastTransitionTime":"2025-10-01T13:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.092398 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.092443 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.092452 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.092466 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.092477 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.195273 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.195379 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.195398 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.195422 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.195464 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.298801 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.298859 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.298876 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.298904 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.298922 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.403553 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.403600 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.403612 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.403628 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.403638 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.506372 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.506513 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.506538 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.506565 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.506584 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.613621 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.613748 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.613802 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.613836 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.613858 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.717243 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.717310 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.717328 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.717357 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.717377 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.820508 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.820570 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.820588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.820620 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.820642 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.924298 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.924426 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.924453 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.924486 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:04 crc kubenswrapper[4605]: I1001 13:46:04.924508 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:04Z","lastTransitionTime":"2025-10-01T13:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.028094 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.028188 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.028207 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.028234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.028254 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.131033 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.131154 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.131167 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.131185 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.131195 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.233939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.234050 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.234069 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.234128 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.234150 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.337060 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.337156 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.337176 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.337201 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.337221 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.440401 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.440465 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.440482 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.440509 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.440528 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.543531 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.543589 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.543605 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.543630 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.543650 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.646661 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.646725 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.646744 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.646788 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.646812 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.749187 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.749234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.749244 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.749257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.749266 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.845740 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.845780 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.845791 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.845808 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.845822 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.859358 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.864657 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.864733 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.864747 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.864786 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.864799 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.879863 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.884057 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.884112 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.884122 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.884135 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.884145 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.897542 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.901850 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.901886 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.901897 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.901914 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.901928 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.914365 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.917658 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.917706 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.917717 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.917734 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.917745 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.926326 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.926467 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.926518 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.926571 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.926815 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.926905 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.926977 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.927031 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.928861 4605 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T13:46:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1d17ca42-5162-4e53-b9d0-0c11f7d91daa\\\",\\\"systemUUID\\\":\\\"1ac84113-1352-4ad6-8d32-f12829b39b5d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T13:46:05Z is after 2025-08-24T17:21:41Z" Oct 01 13:46:05 crc kubenswrapper[4605]: E1001 13:46:05.928966 4605 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.930537 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.930570 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.930579 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.930593 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:05 crc kubenswrapper[4605]: I1001 13:46:05.930602 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:05Z","lastTransitionTime":"2025-10-01T13:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.033265 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.033312 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.033340 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.033384 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.033394 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.135758 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.135793 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.135805 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.135821 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.135833 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.239163 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.239272 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.239284 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.239301 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.239312 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.342127 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.342168 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.342181 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.342204 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.342226 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.446395 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.446470 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.446488 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.446511 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.446531 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.549660 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.549769 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.549786 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.549809 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.549826 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.653077 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.653185 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.653210 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.653239 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.653261 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.757514 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.757585 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.757607 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.757631 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.757649 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.860800 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.860844 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.860860 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.860881 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.860899 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.963683 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.963756 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.963768 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.963785 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:06 crc kubenswrapper[4605]: I1001 13:46:06.963795 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:06Z","lastTransitionTime":"2025-10-01T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.066887 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.066924 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.066932 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.066946 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.066956 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.169932 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.170001 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.170038 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.170056 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.170068 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.273768 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.273830 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.273848 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.273874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.273903 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.377325 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.377390 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.377412 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.377440 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.377461 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.480374 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.480442 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.480460 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.480483 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.480502 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.583827 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.583922 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.583948 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.583971 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.583988 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.686185 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.686239 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.686251 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.686271 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.686286 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.789860 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.789939 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.789953 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.789972 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.789985 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.892961 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.893027 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.893046 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.893071 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.893088 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.926274 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.926303 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.926345 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:07 crc kubenswrapper[4605]: E1001 13:46:07.926483 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.926527 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:07 crc kubenswrapper[4605]: E1001 13:46:07.926669 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:07 crc kubenswrapper[4605]: E1001 13:46:07.927510 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:07 crc kubenswrapper[4605]: E1001 13:46:07.927555 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.995649 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.995678 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.995686 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.995699 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:07 crc kubenswrapper[4605]: I1001 13:46:07.995709 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:07Z","lastTransitionTime":"2025-10-01T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.047289 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podStartSLOduration=69.047252289 podStartE2EDuration="1m9.047252289s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.04691272 +0000 UTC m=+90.790888948" watchObservedRunningTime="2025-10-01 13:46:08.047252289 +0000 UTC m=+90.791228497" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.098371 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.098448 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.098469 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.098501 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.098522 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:08Z","lastTransitionTime":"2025-10-01T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.103168 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-xclfn" podStartSLOduration=69.103148408 podStartE2EDuration="1m9.103148408s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.067579355 +0000 UTC m=+90.811555563" watchObservedRunningTime="2025-10-01 13:46:08.103148408 +0000 UTC m=+90.847124616" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.139653 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-wgx5p" podStartSLOduration=69.139623525 podStartE2EDuration="1m9.139623525s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.10358987 +0000 UTC m=+90.847566088" watchObservedRunningTime="2025-10-01 13:46:08.139623525 +0000 UTC m=+90.883599743" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.151253 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-gqbr2" podStartSLOduration=68.151231322 podStartE2EDuration="1m8.151231322s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.150821221 +0000 UTC m=+90.894797429" watchObservedRunningTime="2025-10-01 13:46:08.151231322 +0000 UTC m=+90.895207530" Oct 01 
13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.188966 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-6zb6l" podStartSLOduration=69.188936533 podStartE2EDuration="1m9.188936533s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.188675186 +0000 UTC m=+90.932651394" watchObservedRunningTime="2025-10-01 13:46:08.188936533 +0000 UTC m=+90.932912741" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.209252 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.209286 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.209293 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.209307 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.209317 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:08Z","lastTransitionTime":"2025-10-01T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.238377 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-jvqzn" podStartSLOduration=68.238356674 podStartE2EDuration="1m8.238356674s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.202502164 +0000 UTC m=+90.946478372" watchObservedRunningTime="2025-10-01 13:46:08.238356674 +0000 UTC m=+90.982332882" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.238628 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=70.238624561 podStartE2EDuration="1m10.238624561s" podCreationTimestamp="2025-10-01 13:44:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.224750352 +0000 UTC m=+90.968726560" watchObservedRunningTime="2025-10-01 13:46:08.238624561 +0000 UTC m=+90.982600769" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.246964 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=69.246944999 podStartE2EDuration="1m9.246944999s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.246946139 +0000 UTC m=+90.990922347" watchObservedRunningTime="2025-10-01 13:46:08.246944999 +0000 UTC m=+90.990921207" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.292375 4605 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=39.292355931 podStartE2EDuration="39.292355931s" podCreationTimestamp="2025-10-01 13:45:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.277395772 +0000 UTC m=+91.021371980" watchObservedRunningTime="2025-10-01 13:46:08.292355931 +0000 UTC m=+91.036332139" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.309884 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=7.309862979 podStartE2EDuration="7.309862979s" podCreationTimestamp="2025-10-01 13:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:08.293836731 +0000 UTC m=+91.037812939" watchObservedRunningTime="2025-10-01 13:46:08.309862979 +0000 UTC m=+91.053839197" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.312037 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.312146 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.312156 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.312167 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.312176 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:08Z","lastTransitionTime":"2025-10-01T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.415372 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.415466 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.415480 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.415497 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.415508 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:08Z","lastTransitionTime":"2025-10-01T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.518116 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.518168 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.518178 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.518194 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:46:08 crc kubenswrapper[4605]: I1001 13:46:08.518207 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:08Z","lastTransitionTime":"2025-10-01T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.849730 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.849786 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.849798 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.849816 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.849831 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:09Z","lastTransitionTime":"2025-10-01T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.925662 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:46:09 crc kubenswrapper[4605]: E1001 13:46:09.925793 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.926286 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.926353 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:46:09 crc kubenswrapper[4605]: E1001 13:46:09.926403 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.926599 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:46:09 crc kubenswrapper[4605]: E1001 13:46:09.926649 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc"
Oct 01 13:46:09 crc kubenswrapper[4605]: E1001 13:46:09.926762 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.952420 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.952460 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.952469 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.952484 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:46:09 crc kubenswrapper[4605]: I1001 13:46:09.952494 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:09Z","lastTransitionTime":"2025-10-01T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.902481 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.902522 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.902539 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.902553 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.902572 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:11Z","lastTransitionTime":"2025-10-01T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.926077 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.926122 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.926144 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:46:11 crc kubenswrapper[4605]: I1001 13:46:11.926257 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:46:11 crc kubenswrapper[4605]: E1001 13:46:11.926342 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 13:46:11 crc kubenswrapper[4605]: E1001 13:46:11.926582 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc"
Oct 01 13:46:11 crc kubenswrapper[4605]: E1001 13:46:11.926769 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 13:46:11 crc kubenswrapper[4605]: E1001 13:46:11.926854 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.004686 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.004722 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.004731 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.004744 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.004754 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:12Z","lastTransitionTime":"2025-10-01T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.928335 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"
Oct 01 13:46:12 crc kubenswrapper[4605]: E1001 13:46:12.928534 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"
Oct 01 13:46:12 crc kubenswrapper[4605]: I1001 13:46:12.945244 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.031585 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.031625 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.031636 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.031651 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.031663 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.236468 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.236538 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.236551 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.236567 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.236580 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.338801 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.338862 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.338874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.338890 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.338901 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.441955 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.442024 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.442041 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.442056 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.442080 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.544583 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.544667 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.544680 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.544701 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.544712 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.647153 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.647225 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.647234 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.647249 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.647258 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.750326 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.750395 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.750411 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.750429 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.750443 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.852692 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.852748 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.852757 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.852771 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.852780 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.925978 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.926063 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.926063 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:13 crc kubenswrapper[4605]: E1001 13:46:13.926176 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:13 crc kubenswrapper[4605]: E1001 13:46:13.926348 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.926391 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:13 crc kubenswrapper[4605]: E1001 13:46:13.926526 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:13 crc kubenswrapper[4605]: E1001 13:46:13.926604 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.955269 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.955302 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.955311 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.955324 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:13 crc kubenswrapper[4605]: I1001 13:46:13.955333 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:13Z","lastTransitionTime":"2025-10-01T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.057654 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.057697 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.057709 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.057726 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.057738 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.161245 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.161308 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.161319 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.161352 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.161369 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.263725 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.263799 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.263866 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.263892 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.263910 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.366515 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.366560 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.366571 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.366588 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.366599 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.469342 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.469382 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.469391 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.469404 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.469414 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.571760 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.572072 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.572205 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.572432 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.572537 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.676615 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.677033 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.677261 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.677428 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.677575 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.780395 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.781285 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.781382 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.781447 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.781529 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.884777 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.884861 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.884874 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.884893 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.884907 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.987257 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.987293 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.987303 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.987316 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:14 crc kubenswrapper[4605]: I1001 13:46:14.987326 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:14Z","lastTransitionTime":"2025-10-01T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.090010 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.090044 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.090053 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.090065 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.090074 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.193330 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.193376 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.193387 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.193403 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.193412 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.295754 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.295797 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.295807 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.295823 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.295835 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.398346 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.398389 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.398398 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.398413 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.398423 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.500631 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.500674 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.500682 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.500696 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.500706 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.603851 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.603884 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.603894 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.603907 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.603917 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.706580 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.706641 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.706653 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.706666 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.706678 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.808839 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.808879 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.808893 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.808910 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.808922 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.911514 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.911555 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.911567 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.911582 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.911592 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:15Z","lastTransitionTime":"2025-10-01T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.926506 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.926549 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.926561 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:15 crc kubenswrapper[4605]: E1001 13:46:15.926649 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:15 crc kubenswrapper[4605]: I1001 13:46:15.926707 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:15 crc kubenswrapper[4605]: E1001 13:46:15.926856 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:15 crc kubenswrapper[4605]: E1001 13:46:15.927045 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:15 crc kubenswrapper[4605]: E1001 13:46:15.927238 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.013948 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.014291 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.014394 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.014477 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.014558 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:16Z","lastTransitionTime":"2025-10-01T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.118073 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.118491 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.118668 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.118746 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.118817 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:16Z","lastTransitionTime":"2025-10-01T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.186435 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.186482 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.186491 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.186505 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.186515 4605 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T13:46:16Z","lastTransitionTime":"2025-10-01T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.230779 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd"] Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.231182 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.233400 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.234167 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.234244 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.234476 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.270080 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=4.270065219 podStartE2EDuration="4.270065219s" podCreationTimestamp="2025-10-01 13:46:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:16.268704402 +0000 UTC m=+99.012680620" watchObservedRunningTime="2025-10-01 13:46:16.270065219 +0000 UTC m=+99.014041427" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.313352 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1d9880b5-95f4-456c-8fbe-f349c69994d3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.313430 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1d9880b5-95f4-456c-8fbe-f349c69994d3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.313460 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d9880b5-95f4-456c-8fbe-f349c69994d3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.313477 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d9880b5-95f4-456c-8fbe-f349c69994d3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.313502 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9880b5-95f4-456c-8fbe-f349c69994d3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.414356 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d9880b5-95f4-456c-8fbe-f349c69994d3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.414406 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d9880b5-95f4-456c-8fbe-f349c69994d3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.414433 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9880b5-95f4-456c-8fbe-f349c69994d3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.414451 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1d9880b5-95f4-456c-8fbe-f349c69994d3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.414488 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/1d9880b5-95f4-456c-8fbe-f349c69994d3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.414556 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1d9880b5-95f4-456c-8fbe-f349c69994d3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.415266 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1d9880b5-95f4-456c-8fbe-f349c69994d3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.415920 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d9880b5-95f4-456c-8fbe-f349c69994d3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.423889 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d9880b5-95f4-456c-8fbe-f349c69994d3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.430446 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d9880b5-95f4-456c-8fbe-f349c69994d3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4tjmd\" (UID: \"1d9880b5-95f4-456c-8fbe-f349c69994d3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.548868 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" Oct 01 13:46:16 crc kubenswrapper[4605]: I1001 13:46:16.634172 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" event={"ID":"1d9880b5-95f4-456c-8fbe-f349c69994d3","Type":"ContainerStarted","Data":"692dde6517f9667ce4fcea5d296148fcc2dadea975f99d708bd33314d122df9a"} Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.627261 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:17 crc kubenswrapper[4605]: E1001 13:46:17.627480 4605 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:46:17 crc kubenswrapper[4605]: E1001 13:46:17.627565 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs podName:8c172ce5-f64e-417d-9fc7-e06c5e443fbc nodeName:}" failed. No retries permitted until 2025-10-01 13:47:21.627547213 +0000 UTC m=+164.371523421 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs") pod "network-metrics-daemon-m7ph7" (UID: "8c172ce5-f64e-417d-9fc7-e06c5e443fbc") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.640124 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" event={"ID":"1d9880b5-95f4-456c-8fbe-f349c69994d3","Type":"ContainerStarted","Data":"7dd46de56771ceacff2a3d399494e7a4afd2923f3a4af96fcaa90ece14d50fbd"} Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.655215 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4tjmd" podStartSLOduration=78.655192809 podStartE2EDuration="1m18.655192809s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:17.654669505 +0000 UTC m=+100.398645703" watchObservedRunningTime="2025-10-01 13:46:17.655192809 +0000 UTC m=+100.399169027" Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.926246 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.926285 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.926324 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:17 crc kubenswrapper[4605]: I1001 13:46:17.926325 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:17 crc kubenswrapper[4605]: E1001 13:46:17.927377 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:17 crc kubenswrapper[4605]: E1001 13:46:17.927513 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:17 crc kubenswrapper[4605]: E1001 13:46:17.927700 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:17 crc kubenswrapper[4605]: E1001 13:46:17.927807 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:19 crc kubenswrapper[4605]: I1001 13:46:19.925804 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:19 crc kubenswrapper[4605]: I1001 13:46:19.925854 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:19 crc kubenswrapper[4605]: E1001 13:46:19.925973 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:19 crc kubenswrapper[4605]: I1001 13:46:19.925993 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:19 crc kubenswrapper[4605]: I1001 13:46:19.926018 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:19 crc kubenswrapper[4605]: E1001 13:46:19.926064 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:19 crc kubenswrapper[4605]: E1001 13:46:19.926132 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:19 crc kubenswrapper[4605]: E1001 13:46:19.926550 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:21 crc kubenswrapper[4605]: I1001 13:46:21.926393 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:21 crc kubenswrapper[4605]: I1001 13:46:21.926445 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:21 crc kubenswrapper[4605]: I1001 13:46:21.926426 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:21 crc kubenswrapper[4605]: I1001 13:46:21.926408 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:21 crc kubenswrapper[4605]: E1001 13:46:21.926552 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:21 crc kubenswrapper[4605]: E1001 13:46:21.926605 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:21 crc kubenswrapper[4605]: E1001 13:46:21.926666 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:21 crc kubenswrapper[4605]: E1001 13:46:21.926796 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:23 crc kubenswrapper[4605]: I1001 13:46:23.925755 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:23 crc kubenswrapper[4605]: I1001 13:46:23.926322 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:23 crc kubenswrapper[4605]: E1001 13:46:23.927007 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:23 crc kubenswrapper[4605]: I1001 13:46:23.926492 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:23 crc kubenswrapper[4605]: I1001 13:46:23.926348 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:23 crc kubenswrapper[4605]: E1001 13:46:23.927082 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:23 crc kubenswrapper[4605]: E1001 13:46:23.927262 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:23 crc kubenswrapper[4605]: E1001 13:46:23.927633 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:24 crc kubenswrapper[4605]: I1001 13:46:24.926593 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:46:24 crc kubenswrapper[4605]: E1001 13:46:24.926761 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-kzv4p_openshift-ovn-kubernetes(e0b90c02-c41c-4f5b-ae0a-c6444435a3ae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" Oct 01 13:46:25 crc kubenswrapper[4605]: I1001 13:46:25.926212 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:25 crc kubenswrapper[4605]: I1001 13:46:25.926266 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:25 crc kubenswrapper[4605]: I1001 13:46:25.926267 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:25 crc kubenswrapper[4605]: E1001 13:46:25.926337 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:25 crc kubenswrapper[4605]: I1001 13:46:25.926212 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:25 crc kubenswrapper[4605]: E1001 13:46:25.926514 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:25 crc kubenswrapper[4605]: E1001 13:46:25.926575 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:25 crc kubenswrapper[4605]: E1001 13:46:25.926721 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:27 crc kubenswrapper[4605]: I1001 13:46:27.926356 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:27 crc kubenswrapper[4605]: I1001 13:46:27.926392 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:27 crc kubenswrapper[4605]: I1001 13:46:27.927233 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:27 crc kubenswrapper[4605]: I1001 13:46:27.928608 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:27 crc kubenswrapper[4605]: E1001 13:46:27.928605 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:27 crc kubenswrapper[4605]: E1001 13:46:27.928729 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:27 crc kubenswrapper[4605]: E1001 13:46:27.928864 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:27 crc kubenswrapper[4605]: E1001 13:46:27.928954 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:29 crc kubenswrapper[4605]: I1001 13:46:29.926503 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:29 crc kubenswrapper[4605]: I1001 13:46:29.926504 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:29 crc kubenswrapper[4605]: I1001 13:46:29.926576 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:29 crc kubenswrapper[4605]: I1001 13:46:29.926581 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:29 crc kubenswrapper[4605]: E1001 13:46:29.926693 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:29 crc kubenswrapper[4605]: E1001 13:46:29.926728 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:29 crc kubenswrapper[4605]: E1001 13:46:29.926778 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:29 crc kubenswrapper[4605]: E1001 13:46:29.926845 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:31 crc kubenswrapper[4605]: I1001 13:46:31.926323 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:31 crc kubenswrapper[4605]: E1001 13:46:31.926829 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:31 crc kubenswrapper[4605]: I1001 13:46:31.926353 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:31 crc kubenswrapper[4605]: E1001 13:46:31.926917 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:31 crc kubenswrapper[4605]: I1001 13:46:31.926379 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:31 crc kubenswrapper[4605]: E1001 13:46:31.926983 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:31 crc kubenswrapper[4605]: I1001 13:46:31.926342 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:31 crc kubenswrapper[4605]: E1001 13:46:31.927085 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.694634 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/1.log" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.695779 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/0.log" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.695823 4605 generic.go:334] "Generic (PLEG): container finished" podID="1c2ca71f-4cb0-4852-927d-af69be5d77f2" containerID="2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9" exitCode=1 Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.695857 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerDied","Data":"2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9"} Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.695904 4605 scope.go:117] "RemoveContainer" containerID="1180cac382ded8ae1a7be2e5738d96beceed10f750d31e36ae1520416a71e8dd" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.696374 4605 scope.go:117] "RemoveContainer" containerID="2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9" Oct 01 13:46:33 crc kubenswrapper[4605]: E1001 13:46:33.696531 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-wgx5p_openshift-multus(1c2ca71f-4cb0-4852-927d-af69be5d77f2)\"" pod="openshift-multus/multus-wgx5p" podUID="1c2ca71f-4cb0-4852-927d-af69be5d77f2" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.926427 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.926443 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.926531 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:33 crc kubenswrapper[4605]: I1001 13:46:33.926705 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:33 crc kubenswrapper[4605]: E1001 13:46:33.926689 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:33 crc kubenswrapper[4605]: E1001 13:46:33.926834 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:33 crc kubenswrapper[4605]: E1001 13:46:33.926892 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:33 crc kubenswrapper[4605]: E1001 13:46:33.926987 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:34 crc kubenswrapper[4605]: I1001 13:46:34.701926 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/1.log" Oct 01 13:46:35 crc kubenswrapper[4605]: I1001 13:46:35.926356 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:35 crc kubenswrapper[4605]: I1001 13:46:35.926382 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:35 crc kubenswrapper[4605]: E1001 13:46:35.926585 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:35 crc kubenswrapper[4605]: I1001 13:46:35.926515 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:35 crc kubenswrapper[4605]: E1001 13:46:35.926699 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:35 crc kubenswrapper[4605]: E1001 13:46:35.926845 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:35 crc kubenswrapper[4605]: I1001 13:46:35.927258 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:35 crc kubenswrapper[4605]: E1001 13:46:35.927375 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:37 crc kubenswrapper[4605]: I1001 13:46:37.926678 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:37 crc kubenswrapper[4605]: I1001 13:46:37.926702 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:37 crc kubenswrapper[4605]: I1001 13:46:37.926702 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:37 crc kubenswrapper[4605]: E1001 13:46:37.926804 4605 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 01 13:46:37 crc kubenswrapper[4605]: E1001 13:46:37.928410 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:37 crc kubenswrapper[4605]: E1001 13:46:37.928451 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:37 crc kubenswrapper[4605]: E1001 13:46:37.928532 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:37 crc kubenswrapper[4605]: I1001 13:46:37.928866 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:37 crc kubenswrapper[4605]: E1001 13:46:37.929151 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:38 crc kubenswrapper[4605]: E1001 13:46:38.017288 4605 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 13:46:39 crc kubenswrapper[4605]: I1001 13:46:39.926048 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:39 crc kubenswrapper[4605]: E1001 13:46:39.926835 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:39 crc kubenswrapper[4605]: I1001 13:46:39.926169 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:39 crc kubenswrapper[4605]: E1001 13:46:39.927017 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:39 crc kubenswrapper[4605]: I1001 13:46:39.926737 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:46:39 crc kubenswrapper[4605]: I1001 13:46:39.926132 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:39 crc kubenswrapper[4605]: E1001 13:46:39.927531 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:39 crc kubenswrapper[4605]: I1001 13:46:39.926477 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:39 crc kubenswrapper[4605]: E1001 13:46:39.927691 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:40 crc kubenswrapper[4605]: I1001 13:46:40.720704 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/3.log" Oct 01 13:46:40 crc kubenswrapper[4605]: I1001 13:46:40.723432 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerStarted","Data":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} Oct 01 13:46:40 crc kubenswrapper[4605]: I1001 13:46:40.723788 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:46:40 crc kubenswrapper[4605]: I1001 13:46:40.915607 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podStartSLOduration=101.915585039 podStartE2EDuration="1m41.915585039s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:46:40.74885351 +0000 UTC m=+123.492829718" watchObservedRunningTime="2025-10-01 13:46:40.915585039 +0000 UTC m=+123.659561247" Oct 01 13:46:40 crc kubenswrapper[4605]: I1001 13:46:40.915981 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-m7ph7"] Oct 01 13:46:40 crc kubenswrapper[4605]: I1001 13:46:40.916076 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:40 crc kubenswrapper[4605]: E1001 13:46:40.916220 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:41 crc kubenswrapper[4605]: I1001 13:46:41.926425 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:41 crc kubenswrapper[4605]: I1001 13:46:41.926572 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:41 crc kubenswrapper[4605]: E1001 13:46:41.926636 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:41 crc kubenswrapper[4605]: I1001 13:46:41.926691 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:41 crc kubenswrapper[4605]: E1001 13:46:41.926809 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:41 crc kubenswrapper[4605]: E1001 13:46:41.926900 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:42 crc kubenswrapper[4605]: I1001 13:46:42.925962 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:42 crc kubenswrapper[4605]: E1001 13:46:42.926344 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:43 crc kubenswrapper[4605]: E1001 13:46:43.019539 4605 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 13:46:43 crc kubenswrapper[4605]: I1001 13:46:43.928898 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:43 crc kubenswrapper[4605]: E1001 13:46:43.929016 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:43 crc kubenswrapper[4605]: I1001 13:46:43.929109 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:43 crc kubenswrapper[4605]: E1001 13:46:43.929167 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:43 crc kubenswrapper[4605]: I1001 13:46:43.929223 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:43 crc kubenswrapper[4605]: E1001 13:46:43.929356 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:44 crc kubenswrapper[4605]: I1001 13:46:44.925980 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:44 crc kubenswrapper[4605]: E1001 13:46:44.926151 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:45 crc kubenswrapper[4605]: I1001 13:46:45.926205 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:45 crc kubenswrapper[4605]: I1001 13:46:45.926205 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:45 crc kubenswrapper[4605]: E1001 13:46:45.926608 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:45 crc kubenswrapper[4605]: E1001 13:46:45.926663 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:45 crc kubenswrapper[4605]: I1001 13:46:45.926375 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:45 crc kubenswrapper[4605]: I1001 13:46:45.926711 4605 scope.go:117] "RemoveContainer" containerID="2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9" Oct 01 13:46:45 crc kubenswrapper[4605]: E1001 13:46:45.926724 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:46 crc kubenswrapper[4605]: I1001 13:46:46.744816 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/1.log" Oct 01 13:46:46 crc kubenswrapper[4605]: I1001 13:46:46.744877 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerStarted","Data":"ecb12288b76f5a9b7386a594a3210e745efc153651dcd926e6d3a04db0c2a2ee"} Oct 01 13:46:46 crc kubenswrapper[4605]: I1001 13:46:46.926426 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:46 crc kubenswrapper[4605]: E1001 13:46:46.926591 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-m7ph7" podUID="8c172ce5-f64e-417d-9fc7-e06c5e443fbc" Oct 01 13:46:47 crc kubenswrapper[4605]: I1001 13:46:47.926028 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:47 crc kubenswrapper[4605]: I1001 13:46:47.926143 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:47 crc kubenswrapper[4605]: I1001 13:46:47.926086 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:47 crc kubenswrapper[4605]: E1001 13:46:47.926958 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 13:46:47 crc kubenswrapper[4605]: E1001 13:46:47.927010 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 13:46:47 crc kubenswrapper[4605]: E1001 13:46:47.927076 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 13:46:48 crc kubenswrapper[4605]: I1001 13:46:48.926219 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:46:48 crc kubenswrapper[4605]: I1001 13:46:48.928650 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 01 13:46:48 crc kubenswrapper[4605]: I1001 13:46:48.931170 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.926626 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.926698 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.926682 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.930655 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.930696 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.930917 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 01 13:46:49 crc kubenswrapper[4605]: I1001 13:46:49.931592 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 01 13:46:56 crc kubenswrapper[4605]: I1001 13:46:56.959697 4605 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 01 13:46:56 crc kubenswrapper[4605]: I1001 13:46:56.995756 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lf6wn"] Oct 01 13:46:56 crc kubenswrapper[4605]: I1001 13:46:56.996308 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:46:56 crc kubenswrapper[4605]: I1001 13:46:56.996457 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"] Oct 01 13:46:56 crc kubenswrapper[4605]: I1001 13:46:56.996906 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" Oct 01 13:46:56 crc kubenswrapper[4605]: I1001 13:46:56.998925 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:56.999229 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:56.999366 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:56.999904 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.000347 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qmh5z"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.000883 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.001857 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2k22x"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.002629 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.003746 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.005224 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-48sxr"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.005425 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.006084 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-48sxr" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.007060 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.019081 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8vpb7"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.020535 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.022365 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-trd6j"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.022890 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2brwf"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.023413 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2bkjp"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.023855 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.030539 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.024396 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.024428 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.024453 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.025872 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032672 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032696 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.034110 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.034201 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.034280 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.034361 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.034451 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032733 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032757 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032781 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032803 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032825 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032847 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032870 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032886 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032908 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032931 4605 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032955 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032977 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.032994 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.033014 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.033264 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.033306 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.035195 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.033332 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.033418 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.036406 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.045273 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.045452 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.045500 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.045605 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.055301 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.055394 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.055614 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.055766 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.055899 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.056184 4605 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.056265 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.056936 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057071 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/88eea53a-1d70-43d0-a8a7-9fa681b09772-auth-proxy-config\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057193 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzrh8\" (UniqueName: \"kubernetes.io/projected/26fe0021-1c2a-4f4e-a6cb-86237a120608-kube-api-access-mzrh8\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057316 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pqz9\" (UniqueName: \"kubernetes.io/projected/88eea53a-1d70-43d0-a8a7-9fa681b09772-kube-api-access-9pqz9\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057433 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-client-ca\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057545 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/88eea53a-1d70-43d0-a8a7-9fa681b09772-machine-approver-tls\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057658 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frcm4\" (UniqueName: \"kubernetes.io/projected/e87d23b8-e74b-4fa9-8f83-760ab58e224d-kube-api-access-frcm4\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057746 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-client-ca\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.057914 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fe0021-1c2a-4f4e-a6cb-86237a120608-serving-cert\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.058011 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-config\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.058107 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-config\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.058199 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88eea53a-1d70-43d0-a8a7-9fa681b09772-config\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.058288 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87d23b8-e74b-4fa9-8f83-760ab58e224d-serving-cert\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.061303 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.061576 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.061743 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.061757 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.061960 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062138 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062144 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062219 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062305 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.061576 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062394 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062548 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062629 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062703 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.062702 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.063166 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.063288 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.063561 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.063762 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.063866 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.063960 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.081731 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.081920 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.082034 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.082264 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.082588 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.082710 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.082776 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.083186 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.083515 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.083633 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.083967 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.084254 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.084707 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.085154 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.085195 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.085397 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.094829 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.094996 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.095181 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.098583 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.099197 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.099288 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.099363 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.101121 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.101342 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.101576 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.101657 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.101732 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.111570 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.113500 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.114859 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.117419 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.125757 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.127471 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.128060 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.128582 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lf6wn"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.128750 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.129176 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.129555 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.130611 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.130748 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.130801 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.131211 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.133337 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.135426 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.137345 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.139909 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ncbbt"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.140336 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.141760 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.155263 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.141853 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.142054 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.142157 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159000 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-etcd-client\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159066 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdfdx\" (UniqueName: \"kubernetes.io/projected/9b2768b9-4054-4f67-a937-2050eebe9c2f-kube-api-access-jdfdx\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159109 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6334be24-ba19-4acc-afdf-74a88a104fce-metrics-tls\") pod \"dns-operator-744455d44c-8vpb7\" (UID: \"6334be24-ba19-4acc-afdf-74a88a104fce\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159132 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjs2q\" (UniqueName: \"kubernetes.io/projected/161a8f41-2f6c-49d4-9efe-4d27a50ed622-kube-api-access-pjs2q\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159166 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cktqn\" (UniqueName: \"kubernetes.io/projected/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-kube-api-access-cktqn\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159185 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7szsr\" (UniqueName: \"kubernetes.io/projected/7f6de1d9-61b5-4cc6-a820-5492052b60ef-kube-api-access-7szsr\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159251 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/88eea53a-1d70-43d0-a8a7-9fa681b09772-auth-proxy-config\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159271 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159291 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f21a4520-2f55-4e46-9243-287aa9ae7189-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159311 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159327 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7df6d80-4ed7-4192-b079-bd0119903e10-serving-cert\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159350 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzrh8\" (UniqueName: \"kubernetes.io/projected/26fe0021-1c2a-4f4e-a6cb-86237a120608-kube-api-access-mzrh8\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159370 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-client-ca\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159476 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pqz9\" (UniqueName: \"kubernetes.io/projected/88eea53a-1d70-43d0-a8a7-9fa681b09772-kube-api-access-9pqz9\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159500 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b2768b9-4054-4f67-a937-2050eebe9c2f-node-pullsecrets\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159542 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-image-import-ca\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159566 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159587 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/88eea53a-1d70-43d0-a8a7-9fa681b09772-machine-approver-tls\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159631 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-config\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159652 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9b2768b9-4054-4f67-a937-2050eebe9c2f-audit-dir\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159704 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-oauth-serving-cert\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159727 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-trusted-ca\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159749 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159807 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159828 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-service-ca\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159866 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159889 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frcm4\" (UniqueName: \"kubernetes.io/projected/e87d23b8-e74b-4fa9-8f83-760ab58e224d-kube-api-access-frcm4\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159911 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-policies\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159930 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chcfm\" (UniqueName: \"kubernetes.io/projected/762e836a-1722-4e01-982d-023b84748aa4-kube-api-access-chcfm\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159950 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dshz\" (UniqueName: \"kubernetes.io/projected/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-kube-api-access-5dshz\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159971 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.159992 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-client-ca\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160010 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-etcd-serving-ca\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160028 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-encryption-config\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160049 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-encryption-config\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160070 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160118 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fe0021-1c2a-4f4e-a6cb-86237a120608-serving-cert\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160135 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-trusted-ca-bundle\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160154 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-config\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160182 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160202 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgqtw\" (UniqueName: \"kubernetes.io/projected/0c93dc9b-fba0-4d28-b8b3-8def5f66d466-kube-api-access-rgqtw\") pod \"downloads-7954f5f757-48sxr\" (UID: \"0c93dc9b-fba0-4d28-b8b3-8def5f66d466\") " pod="openshift-console/downloads-7954f5f757-48sxr"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160218 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-serving-cert\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160236 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/161a8f41-2f6c-49d4-9efe-4d27a50ed622-serving-cert\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160256 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxd62\" (UniqueName: \"kubernetes.io/projected/e7df6d80-4ed7-4192-b079-bd0119903e10-kube-api-access-gxd62\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160277 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-config\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160296 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-config\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160311 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/762e836a-1722-4e01-982d-023b84748aa4-config\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160330 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/762e836a-1722-4e01-982d-023b84748aa4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160350 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88eea53a-1d70-43d0-a8a7-9fa681b09772-config\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160368 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-audit-policies\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160384 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-serving-cert\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160402 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4tt9\" (UniqueName: \"kubernetes.io/projected/6334be24-ba19-4acc-afdf-74a88a104fce-kube-api-access-w4tt9\") pod \"dns-operator-744455d44c-8vpb7\" (UID: \"6334be24-ba19-4acc-afdf-74a88a104fce\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160421 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-config\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160440 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87d23b8-e74b-4fa9-8f83-760ab58e224d-serving-cert\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160455 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-dir\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160483 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-service-ca-bundle\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160502 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-audit\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160521 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e7df6d80-4ed7-4192-b079-bd0119903e10-available-featuregates\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160570 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxsdg\" (UniqueName: \"kubernetes.io/projected/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-kube-api-access-mxsdg\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160589 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160608 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-audit-dir\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160651 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160672 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk45p\" (UniqueName: \"kubernetes.io/projected/f21a4520-2f55-4e46-9243-287aa9ae7189-kube-api-access-vk45p\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160713 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-oauth-config\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160744 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160764 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f21a4520-2f55-4e46-9243-287aa9ae7189-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160783 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/82715fa2-9367-44b6-a3b4-54b01d4865f1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2bmnh\" (UID: \"82715fa2-9367-44b6-a3b4-54b01d4865f1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160802 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjmf6\" (UniqueName: \"kubernetes.io/projected/82715fa2-9367-44b6-a3b4-54b01d4865f1-kube-api-access-vjmf6\") pod \"cluster-samples-operator-665b6dd947-2bmnh\" (UID: \"82715fa2-9367-44b6-a3b4-54b01d4865f1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160822 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-serving-cert\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160852 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160874 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160896 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-config\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160915 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-etcd-client\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160930 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160954 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160974 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-serving-cert\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.160994 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/762e836a-1722-4e01-982d-023b84748aa4-images\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.161730 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/88eea53a-1d70-43d0-a8a7-9fa681b09772-auth-proxy-config\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.172868 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-client-ca\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.174117 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.174815 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5llhc"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.177842 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.178577 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.174888 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.179115 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.175816 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-config\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.175978 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88eea53a-1d70-43d0-a8a7-9fa681b09772-config\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.179963 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/88eea53a-1d70-43d0-a8a7-9fa681b09772-machine-approver-tls\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.180284 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.176623 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-config\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.181808 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-client-ca\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.181283 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.181924 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.185370 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.198121 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.198620 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.198980 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.199647 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.200129 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.200556 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w9b29"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.200961 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201774 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.202693 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201332 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.203023 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.203573 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-trd6j"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201412 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201430 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201438 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201471 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201471 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.197490 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.187225 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fe0021-1c2a-4f4e-a6cb-86237a120608-serving-cert\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.203705 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.190716 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87d23b8-e74b-4fa9-8f83-760ab58e224d-serving-cert\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.201388 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.210040 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-kdrdj"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.210653 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-kdrdj"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.213352 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.217257 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.217292 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.217751 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.218456 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.218671 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.223921 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.224949 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.224985 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hv7vm"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.225867 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.226173 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.228625 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.228655 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.229699 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.233589 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.240009 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.235707 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.247740 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hntgm"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.248033 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2brwf"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.248051 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.248064 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-48sxr"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.248239 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hntgm"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.248528 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.248657 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.250036 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.251625 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2bkjp"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.259632 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qmh5z"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.259722 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5llhc"]
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.262875 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjs2q\" (UniqueName: \"kubernetes.io/projected/161a8f41-2f6c-49d4-9efe-4d27a50ed622-kube-api-access-pjs2q\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263030 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-etcd-client\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263134 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdfdx\" (UniqueName: \"kubernetes.io/projected/9b2768b9-4054-4f67-a937-2050eebe9c2f-kube-api-access-jdfdx\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263222 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6334be24-ba19-4acc-afdf-74a88a104fce-metrics-tls\") pod \"dns-operator-744455d44c-8vpb7\" (UID: \"6334be24-ba19-4acc-afdf-74a88a104fce\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263319 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0c81d24-bbca-4bc9-af4c-48f18da95147-serving-cert\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh"
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263412 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cktqn\" (UniqueName:
\"kubernetes.io/projected/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-kube-api-access-cktqn\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263638 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7szsr\" (UniqueName: \"kubernetes.io/projected/7f6de1d9-61b5-4cc6-a820-5492052b60ef-kube-api-access-7szsr\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263738 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263837 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-webhook-cert\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.263937 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f21a4520-2f55-4e46-9243-287aa9ae7189-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264035 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264232 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7df6d80-4ed7-4192-b079-bd0119903e10-serving-cert\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264341 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ebc0f2e4-730c-4957-bf35-df11f426f04e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264438 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-image-import-ca\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264535 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b2768b9-4054-4f67-a937-2050eebe9c2f-node-pullsecrets\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264632 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264731 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2517f8ad-404f-470e-a785-c4c90c2cdc0d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264831 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/828f308f-0b92-4019-ab24-96477d0b6a47-profile-collector-cert\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264921 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lnpp\" (UniqueName: \"kubernetes.io/projected/828f308f-0b92-4019-ab24-96477d0b6a47-kube-api-access-6lnpp\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265026 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-tmpfs\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265312 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-config\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265432 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9b2768b9-4054-4f67-a937-2050eebe9c2f-audit-dir\") pod \"apiserver-76f77b778f-qmh5z\" (UID: 
\"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265533 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-oauth-serving-cert\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265641 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-trusted-ca\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265760 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265880 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265975 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-service-ca\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.266064 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.266171 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-policies\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.275492 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.276990 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9b2768b9-4054-4f67-a937-2050eebe9c2f-audit-dir\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.265484 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.277791 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-config\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.277805 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-trusted-ca\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.264567 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.278265 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-policies\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.278468 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.279265 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-image-import-ca\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.279385 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.279510 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/9b2768b9-4054-4f67-a937-2050eebe9c2f-node-pullsecrets\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282057 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282219 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/828f308f-0b92-4019-ab24-96477d0b6a47-srv-cert\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282286 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chcfm\" (UniqueName: \"kubernetes.io/projected/762e836a-1722-4e01-982d-023b84748aa4-kube-api-access-chcfm\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282300 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6334be24-ba19-4acc-afdf-74a88a104fce-metrics-tls\") pod \"dns-operator-744455d44c-8vpb7\" (UID: \"6334be24-ba19-4acc-afdf-74a88a104fce\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282320 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dshz\" (UniqueName: \"kubernetes.io/projected/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-kube-api-access-5dshz\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282355 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282391 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-service-ca\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282426 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6a1bba23-826a-4f52-8c53-f7364363a6f7-metrics-tls\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282630 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/27c52669-b56d-45ea-a605-015e92a313e6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jxwcz\" (UID: \"27c52669-b56d-45ea-a605-015e92a313e6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" 
Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282658 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282688 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-ca\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282721 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282756 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-etcd-serving-ca\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282951 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-encryption-config\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.282990 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-encryption-config\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283022 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-config\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283053 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283107 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-trusted-ca-bundle\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283262 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-config\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283318 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283355 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2517f8ad-404f-470e-a785-c4c90c2cdc0d-srv-cert\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283394 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgqtw\" (UniqueName: \"kubernetes.io/projected/0c93dc9b-fba0-4d28-b8b3-8def5f66d466-kube-api-access-rgqtw\") pod \"downloads-7954f5f757-48sxr\" (UID: \"0c93dc9b-fba0-4d28-b8b3-8def5f66d466\") " pod="openshift-console/downloads-7954f5f757-48sxr" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283424 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-serving-cert\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283536 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/161a8f41-2f6c-49d4-9efe-4d27a50ed622-serving-cert\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283574 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxd62\" (UniqueName: \"kubernetes.io/projected/e7df6d80-4ed7-4192-b079-bd0119903e10-kube-api-access-gxd62\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283610 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c81d24-bbca-4bc9-af4c-48f18da95147-config\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283648 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/762e836a-1722-4e01-982d-023b84748aa4-config\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283679 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/762e836a-1722-4e01-982d-023b84748aa4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283865 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebc0f2e4-730c-4957-bf35-df11f426f04e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283906 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvx5d\" (UniqueName: \"kubernetes.io/projected/6a1bba23-826a-4f52-8c53-f7364363a6f7-kube-api-access-bvx5d\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283950 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-audit-policies\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.283988 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-serving-cert\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.284023 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4tt9\" (UniqueName: \"kubernetes.io/projected/6334be24-ba19-4acc-afdf-74a88a104fce-kube-api-access-w4tt9\") pod \"dns-operator-744455d44c-8vpb7\" (UID: \"6334be24-ba19-4acc-afdf-74a88a104fce\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.284055 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-config\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.284287 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-dir\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.284316 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-service-ca-bundle\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286166 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzn2s\" (UniqueName: \"kubernetes.io/projected/4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1-kube-api-access-hzn2s\") pod \"migrator-59844c95c7-wkzl6\" (UID: \"4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286230 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-audit\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286261 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e7df6d80-4ed7-4192-b079-bd0119903e10-available-featuregates\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286321 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vg6t\" (UniqueName: \"kubernetes.io/projected/33744436-5acf-4628-bde8-81e0b2029f1d-kube-api-access-6vg6t\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286344 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bba23-826a-4f52-8c53-f7364363a6f7-trusted-ca\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286381 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286413 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxsdg\" (UniqueName: 
\"kubernetes.io/projected/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-kube-api-access-mxsdg\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286451 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-audit-dir\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286492 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-client\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286553 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4s29\" (UniqueName: \"kubernetes.io/projected/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-kube-api-access-s4s29\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286620 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286667 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk45p\" (UniqueName: \"kubernetes.io/projected/f21a4520-2f55-4e46-9243-287aa9ae7189-kube-api-access-vk45p\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286769 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dnwl\" (UniqueName: \"kubernetes.io/projected/c0c81d24-bbca-4bc9-af4c-48f18da95147-kube-api-access-7dnwl\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.286808 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-apiservice-cert\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.287029 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g757t\" (UniqueName: 
\"kubernetes.io/projected/27c52669-b56d-45ea-a605-015e92a313e6-kube-api-access-g757t\") pod \"package-server-manager-789f6589d5-jxwcz\" (UID: \"27c52669-b56d-45ea-a605-015e92a313e6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.290073 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-oauth-config\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.290148 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291380 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f21a4520-2f55-4e46-9243-287aa9ae7189-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291516 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnnqk\" (UniqueName: \"kubernetes.io/projected/2517f8ad-404f-470e-a785-c4c90c2cdc0d-kube-api-access-rnnqk\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291625 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33744436-5acf-4628-bde8-81e0b2029f1d-serving-cert\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291727 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.285588 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-service-ca-bundle\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291319 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-etcd-serving-ca\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.288207 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/9b2768b9-4054-4f67-a937-2050eebe9c2f-audit\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.289329 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-audit-dir\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.288536 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e7df6d80-4ed7-4192-b079-bd0119903e10-available-featuregates\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.289161 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.290298 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-encryption-config\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.299474 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-dir\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291127 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.299917 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.302072 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.307130 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-9h8zv"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.309625 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8vpb7"] Oct 01 
13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.309792 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.291852 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/82715fa2-9367-44b6-a3b4-54b01d4865f1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2bmnh\" (UID: \"82715fa2-9367-44b6-a3b4-54b01d4865f1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311363 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjmf6\" (UniqueName: \"kubernetes.io/projected/82715fa2-9367-44b6-a3b4-54b01d4865f1-kube-api-access-vjmf6\") pod \"cluster-samples-operator-665b6dd947-2bmnh\" (UID: \"82715fa2-9367-44b6-a3b4-54b01d4865f1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311459 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-serving-cert\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311564 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebc0f2e4-730c-4957-bf35-df11f426f04e-config\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311688 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311778 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311858 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.311977 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-config\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 
13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.312076 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-etcd-client\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.312277 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/762e836a-1722-4e01-982d-023b84748aa4-images\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.312376 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-serving-cert\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.312457 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6a1bba23-826a-4f52-8c53-f7364363a6f7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.313170 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w9b29"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.320077 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.321758 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.323439 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.323539 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5fjl6"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.324470 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.327623 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.330483 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.333067 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.334916 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.336480 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.338147 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9h8zv"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.339531 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.339782 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f21a4520-2f55-4e46-9243-287aa9ae7189-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.340504 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/762e836a-1722-4e01-982d-023b84748aa4-config\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.340592 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.340980 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-service-ca\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.341060 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg"] Oct 01 13:46:57 crc 
kubenswrapper[4605]: I1001 13:46:57.341223 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/161a8f41-2f6c-49d4-9efe-4d27a50ed622-config\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.341272 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.341578 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.341739 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.341759 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.341937 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.342464 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-config\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.342764 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-trusted-ca-bundle\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.342902 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/f21a4520-2f55-4e46-9243-287aa9ae7189-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.343378 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-oauth-serving-cert\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.345113 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frcm4\" (UniqueName: \"kubernetes.io/projected/e87d23b8-e74b-4fa9-8f83-760ab58e224d-kube-api-access-frcm4\") pod \"route-controller-manager-6576b87f9c-hfdjq\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.345437 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.345936 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzrh8\" (UniqueName: \"kubernetes.io/projected/26fe0021-1c2a-4f4e-a6cb-86237a120608-kube-api-access-mzrh8\") pod \"controller-manager-879f6c89f-lf6wn\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.348635 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/82715fa2-9367-44b6-a3b4-54b01d4865f1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2bmnh\" (UID: \"82715fa2-9367-44b6-a3b4-54b01d4865f1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.350824 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-serving-cert\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.352642 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-config\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.352712 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-encryption-config\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.353293 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7df6d80-4ed7-4192-b079-bd0119903e10-serving-cert\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.353704 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/762e836a-1722-4e01-982d-023b84748aa4-images\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.353823 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-etcd-client\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.354371 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.355456 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.355636 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.355971 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.359163 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.359364 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pqz9\" (UniqueName: \"kubernetes.io/projected/88eea53a-1d70-43d0-a8a7-9fa681b09772-kube-api-access-9pqz9\") pod \"machine-approver-56656f9798-zv9xk\" (UID: \"88eea53a-1d70-43d0-a8a7-9fa681b09772\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.360787 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-serving-cert\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.361695 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ncbbt"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.356192 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-audit-policies\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.364036 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2k22x"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.365417 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5fjl6"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.367026 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.368186 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hntgm"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.369296 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.369462 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b2768b9-4054-4f67-a937-2050eebe9c2f-serving-cert\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.369693 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-serving-cert\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.369721 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-oauth-config\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.370355 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/161a8f41-2f6c-49d4-9efe-4d27a50ed622-serving-cert\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.370442 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hv7vm"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.370452 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-etcd-client\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.371411 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-kwml2"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.372341 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/762e836a-1722-4e01-982d-023b84748aa4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.372910 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-sfvwc"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.373069 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.374018 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kwml2"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.374147 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.378814 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.398530 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413616 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/828f308f-0b92-4019-ab24-96477d0b6a47-srv-cert\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413675 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-service-ca\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413704 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6a1bba23-826a-4f52-8c53-f7364363a6f7-metrics-tls\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413727 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/27c52669-b56d-45ea-a605-015e92a313e6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jxwcz\" (UID: \"27c52669-b56d-45ea-a605-015e92a313e6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413750 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413770 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-ca\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413794 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-config\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413815 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413838 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2517f8ad-404f-470e-a785-c4c90c2cdc0d-srv-cert\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413874 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c81d24-bbca-4bc9-af4c-48f18da95147-config\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413898 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebc0f2e4-730c-4957-bf35-df11f426f04e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413921 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvx5d\" (UniqueName: \"kubernetes.io/projected/6a1bba23-826a-4f52-8c53-f7364363a6f7-kube-api-access-bvx5d\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413957 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzn2s\" (UniqueName: \"kubernetes.io/projected/4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1-kube-api-access-hzn2s\") pod \"migrator-59844c95c7-wkzl6\" (UID: \"4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.413984 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vg6t\" (UniqueName: \"kubernetes.io/projected/33744436-5acf-4628-bde8-81e0b2029f1d-kube-api-access-6vg6t\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414007 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bba23-826a-4f52-8c53-f7364363a6f7-trusted-ca\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414038 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-client\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414060 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4s29\" (UniqueName: \"kubernetes.io/projected/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-kube-api-access-s4s29\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414121 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dnwl\" (UniqueName: \"kubernetes.io/projected/c0c81d24-bbca-4bc9-af4c-48f18da95147-kube-api-access-7dnwl\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414152 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-apiservice-cert\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414196 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g757t\" (UniqueName: \"kubernetes.io/projected/27c52669-b56d-45ea-a605-015e92a313e6-kube-api-access-g757t\") pod \"package-server-manager-789f6589d5-jxwcz\" (UID: \"27c52669-b56d-45ea-a605-015e92a313e6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414220 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnnqk\" (UniqueName: \"kubernetes.io/projected/2517f8ad-404f-470e-a785-c4c90c2cdc0d-kube-api-access-rnnqk\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414243 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33744436-5acf-4628-bde8-81e0b2029f1d-serving-cert\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414268 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414305 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebc0f2e4-730c-4957-bf35-df11f426f04e-config\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: 
I1001 13:46:57.414340 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6a1bba23-826a-4f52-8c53-f7364363a6f7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414377 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0c81d24-bbca-4bc9-af4c-48f18da95147-serving-cert\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414413 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-webhook-cert\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414434 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ebc0f2e4-730c-4957-bf35-df11f426f04e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414460 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2517f8ad-404f-470e-a785-c4c90c2cdc0d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414480 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/828f308f-0b92-4019-ab24-96477d0b6a47-profile-collector-cert\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414502 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lnpp\" (UniqueName: \"kubernetes.io/projected/828f308f-0b92-4019-ab24-96477d0b6a47-kube-api-access-6lnpp\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.414537 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-tmpfs\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.415161 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-tmpfs\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.417824 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/828f308f-0b92-4019-ab24-96477d0b6a47-srv-cert\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.417928 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2517f8ad-404f-470e-a785-c4c90c2cdc0d-srv-cert\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.418631 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.419311 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2517f8ad-404f-470e-a785-c4c90c2cdc0d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.420257 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/828f308f-0b92-4019-ab24-96477d0b6a47-profile-collector-cert\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.420839 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-apiservice-cert\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.421710 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-webhook-cert\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.438551 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.448607 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6a1bba23-826a-4f52-8c53-f7364363a6f7-metrics-tls\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.466026 4605 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.478531 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.498951 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.508520 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33744436-5acf-4628-bde8-81e0b2029f1d-serving-cert\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.519955 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.527899 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-client\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.539292 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.558790 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.565570 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-config\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.578869 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.585122 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-ca\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.598602 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.604992 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/33744436-5acf-4628-bde8-81e0b2029f1d-etcd-service-ca\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.618262 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.622674 4605 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.634048 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.639732 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 01 13:46:57 crc kubenswrapper[4605]: W1001 13:46:57.651974 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88eea53a_1d70_43d0_a8a7_9fa681b09772.slice/crio-6b5025047f64b7a6c2f0af3be1e0006187b73e455dd74e1ab936d168d372e43b WatchSource:0}: Error finding container 6b5025047f64b7a6c2f0af3be1e0006187b73e455dd74e1ab936d168d372e43b: Status 404 returned error can't find the container with id 6b5025047f64b7a6c2f0af3be1e0006187b73e455dd74e1ab936d168d372e43b Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.661291 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.674310 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bba23-826a-4f52-8c53-f7364363a6f7-trusted-ca\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.679400 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.698903 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.719295 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.728868 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebc0f2e4-730c-4957-bf35-df11f426f04e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.739418 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.746619 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebc0f2e4-730c-4957-bf35-df11f426f04e-config\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.759235 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 01 13:46:57 crc kubenswrapper[4605]: 
I1001 13:46:57.779218 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.790903 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" event={"ID":"88eea53a-1d70-43d0-a8a7-9fa681b09772","Type":"ContainerStarted","Data":"6b5025047f64b7a6c2f0af3be1e0006187b73e455dd74e1ab936d168d372e43b"} Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.792152 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/27c52669-b56d-45ea-a605-015e92a313e6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jxwcz\" (UID: \"27c52669-b56d-45ea-a605-015e92a313e6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.798951 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.821461 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.838692 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.861263 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.879070 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.899334 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.919164 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.938871 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.952641 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"] Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.958498 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.978821 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 01 13:46:57 crc kubenswrapper[4605]: W1001 13:46:57.991604 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode87d23b8_e74b_4fa9_8f83_760ab58e224d.slice/crio-e90c05da60d5d2b390f4c4975701fe400eff20aa198fa42e127923e2f3dddd96 WatchSource:0}: Error finding container 
e90c05da60d5d2b390f4c4975701fe400eff20aa198fa42e127923e2f3dddd96: Status 404 returned error can't find the container with id e90c05da60d5d2b390f4c4975701fe400eff20aa198fa42e127923e2f3dddd96 Oct 01 13:46:57 crc kubenswrapper[4605]: I1001 13:46:57.998900 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.008357 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.018289 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.038348 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.045300 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.058953 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.063173 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lf6wn"] Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.078445 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.093201 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0c81d24-bbca-4bc9-af4c-48f18da95147-serving-cert\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.098825 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.106527 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c81d24-bbca-4bc9-af4c-48f18da95147-config\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.118824 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.159560 4605 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.178501 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.199277 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.217448 4605 request.go:700] Waited for 1.005850262s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-stats-default&limit=500&resourceVersion=0 Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.218697 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.239487 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.259219 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.279478 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.298432 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.318206 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.340273 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.358979 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.379437 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.398751 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.419621 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.449087 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.459487 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.481348 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.500616 4605 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.520190 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.548772 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.559368 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.580285 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.598975 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.618948 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.639338 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.658834 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.679225 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.700200 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.718759 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.739787 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.759765 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.779421 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.795351 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" event={"ID":"88eea53a-1d70-43d0-a8a7-9fa681b09772","Type":"ContainerStarted","Data":"f733f40221a8417477feca0897fab3db6e61fce049d86d8fc20880dbb747cc95"} Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.795398 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" event={"ID":"88eea53a-1d70-43d0-a8a7-9fa681b09772","Type":"ContainerStarted","Data":"1b5f497aa885de3fc0c1756f8fca8b49210c559311e6c0c3f729a61be7e8ce45"} Oct 01 
13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.796703 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" event={"ID":"e87d23b8-e74b-4fa9-8f83-760ab58e224d","Type":"ContainerStarted","Data":"23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c"} Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.796733 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" event={"ID":"e87d23b8-e74b-4fa9-8f83-760ab58e224d","Type":"ContainerStarted","Data":"e90c05da60d5d2b390f4c4975701fe400eff20aa198fa42e127923e2f3dddd96"} Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.796866 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.798423 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" event={"ID":"26fe0021-1c2a-4f4e-a6cb-86237a120608","Type":"ContainerStarted","Data":"8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f"} Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.798467 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" event={"ID":"26fe0021-1c2a-4f4e-a6cb-86237a120608","Type":"ContainerStarted","Data":"57a366074b349e65fd1386d22bb953f39f90289e6cb1cd267622811006966d42"} Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.798641 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.799207 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.800209 4605 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lf6wn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.800261 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" podUID="26fe0021-1c2a-4f4e-a6cb-86237a120608" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.819855 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.839400 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.859236 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.901330 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7szsr\" (UniqueName: 
\"kubernetes.io/projected/7f6de1d9-61b5-4cc6-a820-5492052b60ef-kube-api-access-7szsr\") pod \"oauth-openshift-558db77b4-trd6j\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.929041 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjs2q\" (UniqueName: \"kubernetes.io/projected/161a8f41-2f6c-49d4-9efe-4d27a50ed622-kube-api-access-pjs2q\") pod \"authentication-operator-69f744f599-6pn8g\" (UID: \"161a8f41-2f6c-49d4-9efe-4d27a50ed622\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.938037 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cktqn\" (UniqueName: \"kubernetes.io/projected/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-kube-api-access-cktqn\") pod \"console-f9d7485db-2brwf\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.958449 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdfdx\" (UniqueName: \"kubernetes.io/projected/9b2768b9-4054-4f67-a937-2050eebe9c2f-kube-api-access-jdfdx\") pod \"apiserver-76f77b778f-qmh5z\" (UID: \"9b2768b9-4054-4f67-a937-2050eebe9c2f\") " pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.979707 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chcfm\" (UniqueName: \"kubernetes.io/projected/762e836a-1722-4e01-982d-023b84748aa4-kube-api-access-chcfm\") pod \"machine-api-operator-5694c8668f-2k22x\" (UID: \"762e836a-1722-4e01-982d-023b84748aa4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:58 crc kubenswrapper[4605]: I1001 13:46:58.992193 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.002865 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.020788 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxsdg\" (UniqueName: \"kubernetes.io/projected/c55cb7b3-eaaf-486b-940e-d6ffd06ce8da-kube-api-access-mxsdg\") pod \"console-operator-58897d9998-2bkjp\" (UID: \"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da\") " pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.030328 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk45p\" (UniqueName: \"kubernetes.io/projected/f21a4520-2f55-4e46-9243-287aa9ae7189-kube-api-access-vk45p\") pod \"openshift-controller-manager-operator-756b6f6bc6-8mkj9\" (UID: \"f21a4520-2f55-4e46-9243-287aa9ae7189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.031758 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.038193 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dshz\" (UniqueName: \"kubernetes.io/projected/04d0121b-ec20-4607-9a6b-89ccaa4a4d57-kube-api-access-5dshz\") pod \"apiserver-7bbb656c7d-pxc25\" (UID: \"04d0121b-ec20-4607-9a6b-89ccaa4a4d57\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.050991 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.059945 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.080842 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4tt9\" (UniqueName: \"kubernetes.io/projected/6334be24-ba19-4acc-afdf-74a88a104fce-kube-api-access-w4tt9\") pod \"dns-operator-744455d44c-8vpb7\" (UID: \"6334be24-ba19-4acc-afdf-74a88a104fce\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.097977 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgqtw\" (UniqueName: \"kubernetes.io/projected/0c93dc9b-fba0-4d28-b8b3-8def5f66d466-kube-api-access-rgqtw\") pod \"downloads-7954f5f757-48sxr\" (UID: \"0c93dc9b-fba0-4d28-b8b3-8def5f66d466\") " pod="openshift-console/downloads-7954f5f757-48sxr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.115602 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxd62\" (UniqueName: \"kubernetes.io/projected/e7df6d80-4ed7-4192-b079-bd0119903e10-kube-api-access-gxd62\") pod \"openshift-config-operator-7777fb866f-b84ph\" (UID: \"e7df6d80-4ed7-4192-b079-bd0119903e10\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.118945 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.138751 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.186391 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.194498 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.201780 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.202263 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.213583 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.216986 4605 request.go:700] Waited for 1.892254899s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/secrets?fieldSelector=metadata.name%3Dcsi-hostpath-provisioner-sa-dockercfg-qd74k&limit=500&resourceVersion=0 Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.218859 4605 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.219367 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjmf6\" (UniqueName: \"kubernetes.io/projected/82715fa2-9367-44b6-a3b4-54b01d4865f1-kube-api-access-vjmf6\") pod \"cluster-samples-operator-665b6dd947-2bmnh\" (UID: \"82715fa2-9367-44b6-a3b4-54b01d4865f1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.231469 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.238784 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.255334 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-48sxr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.262209 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.280338 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.299030 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.322434 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.324561 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.339152 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.347454 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.359562 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.380805 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.385002 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.402567 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-trd6j"] Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.439419 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzn2s\" (UniqueName: \"kubernetes.io/projected/4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1-kube-api-access-hzn2s\") pod \"migrator-59844c95c7-wkzl6\" (UID: \"4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.442608 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vg6t\" (UniqueName: \"kubernetes.io/projected/33744436-5acf-4628-bde8-81e0b2029f1d-kube-api-access-6vg6t\") pod \"etcd-operator-b45778765-5llhc\" (UID: \"33744436-5acf-4628-bde8-81e0b2029f1d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:59 crc kubenswrapper[4605]: W1001 13:46:59.447856 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f6de1d9_61b5_4cc6_a820_5492052b60ef.slice/crio-410f3596f784a7a1c2a185af945f5ce66d7471fd5c4873925db7de33891632a9 WatchSource:0}: Error finding container 410f3596f784a7a1c2a185af945f5ce66d7471fd5c4873925db7de33891632a9: Status 404 returned error can't find the container with id 410f3596f784a7a1c2a185af945f5ce66d7471fd5c4873925db7de33891632a9 Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.467241 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.481420 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4s29\" (UniqueName: \"kubernetes.io/projected/263bef0b-5d5b-40e6-ac60-ff6c4d4217af-kube-api-access-s4s29\") pod \"packageserver-d55dfcdfc-dpknn\" (UID: \"263bef0b-5d5b-40e6-ac60-ff6c4d4217af\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.484789 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dnwl\" (UniqueName: \"kubernetes.io/projected/c0c81d24-bbca-4bc9-af4c-48f18da95147-kube-api-access-7dnwl\") pod \"service-ca-operator-777779d784-9v5mh\" (UID: \"c0c81d24-bbca-4bc9-af4c-48f18da95147\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.487406 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6pn8g"] Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.505479 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.513728 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvx5d\" (UniqueName: \"kubernetes.io/projected/6a1bba23-826a-4f52-8c53-f7364363a6f7-kube-api-access-bvx5d\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.527476 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f455f5fd-4c5c-487c-9d5a-f86a5c16ae20-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-frtv2\" (UID: \"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.533753 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.552521 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6a1bba23-826a-4f52-8c53-f7364363a6f7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-j6crh\" (UID: \"6a1bba23-826a-4f52-8c53-f7364363a6f7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.592349 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2bkjp"] Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.611312 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g757t\" (UniqueName: \"kubernetes.io/projected/27c52669-b56d-45ea-a605-015e92a313e6-kube-api-access-g757t\") pod \"package-server-manager-789f6589d5-jxwcz\" (UID: \"27c52669-b56d-45ea-a605-015e92a313e6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.639534 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnnqk\" (UniqueName: \"kubernetes.io/projected/2517f8ad-404f-470e-a785-c4c90c2cdc0d-kube-api-access-rnnqk\") pod \"olm-operator-6b444d44fb-rx67q\" (UID: \"2517f8ad-404f-470e-a785-c4c90c2cdc0d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.645758 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ebc0f2e4-730c-4957-bf35-df11f426f04e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g59zg\" (UID: \"ebc0f2e4-730c-4957-bf35-df11f426f04e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.652359 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lnpp\" (UniqueName: \"kubernetes.io/projected/828f308f-0b92-4019-ab24-96477d0b6a47-kube-api-access-6lnpp\") pod \"catalog-operator-68c6474976-hq6j4\" (UID: \"828f308f-0b92-4019-ab24-96477d0b6a47\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:59 crc kubenswrapper[4605]: W1001 
13:46:59.652494 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc55cb7b3_eaaf_486b_940e_d6ffd06ce8da.slice/crio-e2d92e054d92c432d2af305e1f8ecb43ecb71672f67bb6743be8b8d6be3a1142 WatchSource:0}: Error finding container e2d92e054d92c432d2af305e1f8ecb43ecb71672f67bb6743be8b8d6be3a1142: Status 404 returned error can't find the container with id e2d92e054d92c432d2af305e1f8ecb43ecb71672f67bb6743be8b8d6be3a1142 Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.701444 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-proxy-tls\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.701480 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw4zp\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-kube-api-access-dw4zp\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.701496 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff4d6\" (UniqueName: \"kubernetes.io/projected/ee291562-056e-4ca5-b326-a412ba18d5db-kube-api-access-ff4d6\") pod \"multus-admission-controller-857f4d67dd-w9b29\" (UID: \"ee291562-056e-4ca5-b326-a412ba18d5db\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.701515 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5271f9c4-f151-4e96-b831-93459666b524-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.701533 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.701547 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ee291562-056e-4ca5-b326-a412ba18d5db-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w9b29\" (UID: \"ee291562-056e-4ca5-b326-a412ba18d5db\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702688 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-certificates\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702726 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-trusted-ca\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702744 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/16842335-be0d-4f69-b0af-e98b21c572ab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pwm5c\" (UID: \"16842335-be0d-4f69-b0af-e98b21c572ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702772 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4wkx\" (UniqueName: \"kubernetes.io/projected/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-kube-api-access-p4wkx\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702797 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw2v2\" (UniqueName: \"kubernetes.io/projected/16842335-be0d-4f69-b0af-e98b21c572ab-kube-api-access-jw2v2\") pod \"control-plane-machine-set-operator-78cbb6b69f-pwm5c\" (UID: \"16842335-be0d-4f69-b0af-e98b21c572ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702836 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-tls\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702901 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702919 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-bound-sa-token\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702935 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5271f9c4-f151-4e96-b831-93459666b524-image-registry-operator-tls\") pod 
\"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702951 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.702974 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.703008 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt8rs\" (UniqueName: \"kubernetes.io/projected/5271f9c4-f151-4e96-b831-93459666b524-kube-api-access-bt8rs\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.703028 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5271f9c4-f151-4e96-b831-93459666b524-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: E1001 13:46:59.705218 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.205200953 +0000 UTC m=+142.949177211 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.711506 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2brwf"] Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.743831 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.744513 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.744620 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.754300 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.768559 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"] Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.784279 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.806373 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.806917 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807175 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9djv\" (UniqueName: \"kubernetes.io/projected/61dd9b83-5164-4f9d-b3b3-2599072d6593-kube-api-access-h9djv\") pod \"ingress-canary-kwml2\" (UID: \"61dd9b83-5164-4f9d-b3b3-2599072d6593\") " pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807221 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbz86\" (UniqueName: \"kubernetes.io/projected/c910e340-5d0a-4d6d-a570-3c1a5d82231b-kube-api-access-lbz86\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807255 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt8rs\" (UniqueName: \"kubernetes.io/projected/5271f9c4-f151-4e96-b831-93459666b524-kube-api-access-bt8rs\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807285 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-stats-auth\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807314 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5271f9c4-f151-4e96-b831-93459666b524-bound-sa-token\") pod 
\"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807351 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-registration-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807408 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c876f01f-3cca-49fd-98f1-3232fd165176-node-bootstrap-token\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807480 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-default-certificate\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807523 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4622426-9b45-4026-ac8a-3390697075ef-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807540 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dp4l\" (UniqueName: \"kubernetes.io/projected/9317edb7-8cf8-4045-95a9-196349d21055-kube-api-access-9dp4l\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807567 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-csi-data-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807612 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-proxy-tls\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807650 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw4zp\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-kube-api-access-dw4zp\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: 
\"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807669 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff4d6\" (UniqueName: \"kubernetes.io/projected/ee291562-056e-4ca5-b326-a412ba18d5db-kube-api-access-ff4d6\") pod \"multus-admission-controller-857f4d67dd-w9b29\" (UID: \"ee291562-056e-4ca5-b326-a412ba18d5db\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807700 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4622426-9b45-4026-ac8a-3390697075ef-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807715 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/238e77bf-d5db-463a-91b7-b6b8be0d642b-proxy-tls\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807751 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5271f9c4-f151-4e96-b831-93459666b524-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807794 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9ljd\" (UniqueName: \"kubernetes.io/projected/c876f01f-3cca-49fd-98f1-3232fd165176-kube-api-access-v9ljd\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807833 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807849 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ee291562-056e-4ca5-b326-a412ba18d5db-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w9b29\" (UID: \"ee291562-056e-4ca5-b326-a412ba18d5db\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807883 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c876f01f-3cca-49fd-98f1-3232fd165176-certs\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " 
pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807901 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2fpb\" (UniqueName: \"kubernetes.io/projected/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-kube-api-access-n2fpb\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807936 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-certificates\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.807960 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82p8n\" (UniqueName: \"kubernetes.io/projected/da66068e-3e86-487b-9a81-e4edd65c71cb-kube-api-access-82p8n\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808023 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-trusted-ca\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808039 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-service-ca-bundle\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808069 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/16842335-be0d-4f69-b0af-e98b21c572ab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pwm5c\" (UID: \"16842335-be0d-4f69-b0af-e98b21c572ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808087 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4wkx\" (UniqueName: \"kubernetes.io/projected/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-kube-api-access-p4wkx\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808127 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1ad4e395-06d2-483c-a847-16df8df7ec3c-signing-key\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" 
Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808166 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw2v2\" (UniqueName: \"kubernetes.io/projected/16842335-be0d-4f69-b0af-e98b21c572ab-kube-api-access-jw2v2\") pod \"control-plane-machine-set-operator-78cbb6b69f-pwm5c\" (UID: \"16842335-be0d-4f69-b0af-e98b21c572ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808188 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7058af57-9197-463d-8171-d1be29016903-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808209 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgq8t\" (UniqueName: \"kubernetes.io/projected/1ad4e395-06d2-483c-a847-16df8df7ec3c-kube-api-access-bgq8t\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808230 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808297 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-metrics-certs\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808312 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7058af57-9197-463d-8171-d1be29016903-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808364 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da66068e-3e86-487b-9a81-e4edd65c71cb-config-volume\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808401 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-tls\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc 
kubenswrapper[4605]: I1001 13:46:59.808418 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/61dd9b83-5164-4f9d-b3b3-2599072d6593-cert\") pod \"ingress-canary-kwml2\" (UID: \"61dd9b83-5164-4f9d-b3b3-2599072d6593\") " pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808434 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-plugins-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808450 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/da66068e-3e86-487b-9a81-e4edd65c71cb-metrics-tls\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808490 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/238e77bf-d5db-463a-91b7-b6b8be0d642b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808506 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808523 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkkbz\" (UniqueName: \"kubernetes.io/projected/238e77bf-d5db-463a-91b7-b6b8be0d642b-kube-api-access-qkkbz\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808542 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808588 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/238e77bf-d5db-463a-91b7-b6b8be0d642b-images\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808603 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea3e40db-79b6-4499-b7e6-71bd46c55663-config-volume\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808617 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-mountpoint-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808640 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5ww6\" (UniqueName: \"kubernetes.io/projected/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-kube-api-access-t5ww6\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808686 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808702 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea3e40db-79b6-4499-b7e6-71bd46c55663-secret-volume\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808718 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808753 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-socket-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808788 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1ad4e395-06d2-483c-a847-16df8df7ec3c-signing-cabundle\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808804 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c4622426-9b45-4026-ac8a-3390697075ef-config\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808819 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m9jl\" (UniqueName: \"kubernetes.io/projected/ea3e40db-79b6-4499-b7e6-71bd46c55663-kube-api-access-2m9jl\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808845 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-bound-sa-token\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808889 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5271f9c4-f151-4e96-b831-93459666b524-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808907 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.808949 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cwm7\" (UniqueName: \"kubernetes.io/projected/7058af57-9197-463d-8171-d1be29016903-kube-api-access-6cwm7\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: E1001 13:46:59.809937 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.309916355 +0000 UTC m=+143.053892563 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.824906 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.834819 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.836976 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5271f9c4-f151-4e96-b831-93459666b524-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.838998 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ee291562-056e-4ca5-b326-a412ba18d5db-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-w9b29\" (UID: \"ee291562-056e-4ca5-b326-a412ba18d5db\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.846896 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-trusted-ca\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.854149 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-certificates\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.854155 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.866953 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" event={"ID":"161a8f41-2f6c-49d4-9efe-4d27a50ed622","Type":"ContainerStarted","Data":"34d482313cbb3452bc2fd128401a303843835a9f81a96d8cc4f95c3f61f97342"} Oct 01 13:46:59 
crc kubenswrapper[4605]: I1001 13:46:59.867706 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" event={"ID":"7f6de1d9-61b5-4cc6-a820-5492052b60ef","Type":"ContainerStarted","Data":"410f3596f784a7a1c2a185af945f5ce66d7471fd5c4873925db7de33891632a9"} Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.870225 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" event={"ID":"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da","Type":"ContainerStarted","Data":"e2d92e054d92c432d2af305e1f8ecb43ecb71672f67bb6743be8b8d6be3a1142"} Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.886639 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.892945 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-tls\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.893861 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-proxy-tls\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.909551 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/16842335-be0d-4f69-b0af-e98b21c572ab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pwm5c\" (UID: \"16842335-be0d-4f69-b0af-e98b21c572ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.909779 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5271f9c4-f151-4e96-b831-93459666b524-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910170 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt8rs\" (UniqueName: \"kubernetes.io/projected/5271f9c4-f151-4e96-b831-93459666b524-kube-api-access-bt8rs\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910328 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da66068e-3e86-487b-9a81-e4edd65c71cb-config-volume\") pod 
\"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910350 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/61dd9b83-5164-4f9d-b3b3-2599072d6593-cert\") pod \"ingress-canary-kwml2\" (UID: \"61dd9b83-5164-4f9d-b3b3-2599072d6593\") " pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910368 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-plugins-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910384 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/da66068e-3e86-487b-9a81-e4edd65c71cb-metrics-tls\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910411 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/238e77bf-d5db-463a-91b7-b6b8be0d642b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910429 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910447 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkkbz\" (UniqueName: \"kubernetes.io/projected/238e77bf-d5db-463a-91b7-b6b8be0d642b-kube-api-access-qkkbz\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910462 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910481 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/238e77bf-d5db-463a-91b7-b6b8be0d642b-images\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910500 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea3e40db-79b6-4499-b7e6-71bd46c55663-config-volume\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910519 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5ww6\" (UniqueName: \"kubernetes.io/projected/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-kube-api-access-t5ww6\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910542 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-mountpoint-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910570 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-socket-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910589 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea3e40db-79b6-4499-b7e6-71bd46c55663-secret-volume\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910611 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910628 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1ad4e395-06d2-483c-a847-16df8df7ec3c-signing-cabundle\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910645 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4622426-9b45-4026-ac8a-3390697075ef-config\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910663 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m9jl\" (UniqueName: \"kubernetes.io/projected/ea3e40db-79b6-4499-b7e6-71bd46c55663-kube-api-access-2m9jl\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910701 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910726 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cwm7\" (UniqueName: \"kubernetes.io/projected/7058af57-9197-463d-8171-d1be29016903-kube-api-access-6cwm7\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910751 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9djv\" (UniqueName: \"kubernetes.io/projected/61dd9b83-5164-4f9d-b3b3-2599072d6593-kube-api-access-h9djv\") pod \"ingress-canary-kwml2\" (UID: \"61dd9b83-5164-4f9d-b3b3-2599072d6593\") " pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910770 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbz86\" (UniqueName: \"kubernetes.io/projected/c910e340-5d0a-4d6d-a570-3c1a5d82231b-kube-api-access-lbz86\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910798 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-stats-auth\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910823 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-registration-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910853 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c876f01f-3cca-49fd-98f1-3232fd165176-node-bootstrap-token\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910883 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-default-certificate\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910904 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4622426-9b45-4026-ac8a-3390697075ef-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910922 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dp4l\" (UniqueName: \"kubernetes.io/projected/9317edb7-8cf8-4045-95a9-196349d21055-kube-api-access-9dp4l\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910940 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-csi-data-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910969 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4622426-9b45-4026-ac8a-3390697075ef-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.910986 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/238e77bf-d5db-463a-91b7-b6b8be0d642b-proxy-tls\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911007 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9ljd\" (UniqueName: \"kubernetes.io/projected/c876f01f-3cca-49fd-98f1-3232fd165176-kube-api-access-v9ljd\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911027 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c876f01f-3cca-49fd-98f1-3232fd165176-certs\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911045 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2fpb\" (UniqueName: \"kubernetes.io/projected/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-kube-api-access-n2fpb\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911063 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82p8n\" (UniqueName: 
\"kubernetes.io/projected/da66068e-3e86-487b-9a81-e4edd65c71cb-kube-api-access-82p8n\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911084 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-service-ca-bundle\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911126 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1ad4e395-06d2-483c-a847-16df8df7ec3c-signing-key\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911152 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7058af57-9197-463d-8171-d1be29016903-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911171 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgq8t\" (UniqueName: \"kubernetes.io/projected/1ad4e395-06d2-483c-a847-16df8df7ec3c-kube-api-access-bgq8t\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911190 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911219 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-metrics-certs\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.911236 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7058af57-9197-463d-8171-d1be29016903-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.912934 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.913512 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-registration-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.942292 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5271f9c4-f151-4e96-b831-93459666b524-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hhbgt\" (UID: \"5271f9c4-f151-4e96-b831-93459666b524\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.943245 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7058af57-9197-463d-8171-d1be29016903-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.945040 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw4zp\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-kube-api-access-dw4zp\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.945300 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/238e77bf-d5db-463a-91b7-b6b8be0d642b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.945753 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-stats-auth\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.960815 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-service-ca-bundle\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.962307 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-csi-data-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.975920 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/238e77bf-d5db-463a-91b7-b6b8be0d642b-images\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 
13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.976595 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea3e40db-79b6-4499-b7e6-71bd46c55663-config-volume\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.976750 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-mountpoint-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.976806 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-socket-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.983764 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.989516 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4622426-9b45-4026-ac8a-3390697075ef-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.989823 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/238e77bf-d5db-463a-91b7-b6b8be0d642b-proxy-tls\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.990282 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c876f01f-3cca-49fd-98f1-3232fd165176-certs\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.992502 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c910e340-5d0a-4d6d-a570-3c1a5d82231b-plugins-dir\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.993444 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7058af57-9197-463d-8171-d1be29016903-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: 
\"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.995016 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.995125 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da66068e-3e86-487b-9a81-e4edd65c71cb-config-volume\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.997778 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/61dd9b83-5164-4f9d-b3b3-2599072d6593-cert\") pod \"ingress-canary-kwml2\" (UID: \"61dd9b83-5164-4f9d-b3b3-2599072d6593\") " pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.998551 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.998846 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1ad4e395-06d2-483c-a847-16df8df7ec3c-signing-cabundle\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:46:59 crc kubenswrapper[4605]: E1001 13:46:59.998897 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.498884349 +0000 UTC m=+143.242860557 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:46:59 crc kubenswrapper[4605]: I1001 13:46:59.999421 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4622426-9b45-4026-ac8a-3390697075ef-config\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.002187 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-metrics-certs\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.003240 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff4d6\" (UniqueName: \"kubernetes.io/projected/ee291562-056e-4ca5-b326-a412ba18d5db-kube-api-access-ff4d6\") pod \"multus-admission-controller-857f4d67dd-w9b29\" (UID: \"ee291562-056e-4ca5-b326-a412ba18d5db\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.005485 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.006337 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c876f01f-3cca-49fd-98f1-3232fd165176-node-bootstrap-token\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.006736 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-default-certificate\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.008198 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1ad4e395-06d2-483c-a847-16df8df7ec3c-signing-key\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.009069 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/da66068e-3e86-487b-9a81-e4edd65c71cb-metrics-tls\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.013970 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4wkx\" (UniqueName: \"kubernetes.io/projected/d71c176f-21e9-4ec4-ba8f-ae4a58649b73-kube-api-access-p4wkx\") pod \"machine-config-controller-84d6567774-cpppf\" (UID: \"d71c176f-21e9-4ec4-ba8f-ae4a58649b73\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.016892 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-48sxr"] Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.017299 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.018875 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.518846466 +0000 UTC m=+143.262822674 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.028710 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw2v2\" (UniqueName: \"kubernetes.io/projected/16842335-be0d-4f69-b0af-e98b21c572ab-kube-api-access-jw2v2\") pod \"control-plane-machine-set-operator-78cbb6b69f-pwm5c\" (UID: \"16842335-be0d-4f69-b0af-e98b21c572ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.030744 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9ljd\" (UniqueName: \"kubernetes.io/projected/c876f01f-3cca-49fd-98f1-3232fd165176-kube-api-access-v9ljd\") pod \"machine-config-server-sfvwc\" (UID: \"c876f01f-3cca-49fd-98f1-3232fd165176\") " pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.045119 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea3e40db-79b6-4499-b7e6-71bd46c55663-secret-volume\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.054624 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-bound-sa-token\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.100567 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.101586 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.137019 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2k22x"] Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.149728 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82p8n\" (UniqueName: \"kubernetes.io/projected/da66068e-3e86-487b-9a81-e4edd65c71cb-kube-api-access-82p8n\") pod \"dns-default-9h8zv\" (UID: \"da66068e-3e86-487b-9a81-e4edd65c71cb\") " pod="openshift-dns/dns-default-9h8zv" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.154692 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.155180 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.655167176 +0000 UTC m=+143.399143384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.155678 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.178268 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.184923 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4622426-9b45-4026-ac8a-3390697075ef-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lhptc\" (UID: \"c4622426-9b45-4026-ac8a-3390697075ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.186501 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkkbz\" (UniqueName: \"kubernetes.io/projected/238e77bf-d5db-463a-91b7-b6b8be0d642b-kube-api-access-qkkbz\") pod \"machine-config-operator-74547568cd-qqwf9\" (UID: \"238e77bf-d5db-463a-91b7-b6b8be0d642b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.213042 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dp4l\" (UniqueName: \"kubernetes.io/projected/9317edb7-8cf8-4045-95a9-196349d21055-kube-api-access-9dp4l\") pod \"marketplace-operator-79b997595-hv7vm\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.213657 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9h8zv" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.221531 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qmh5z"] Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.223989 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5ww6\" (UniqueName: \"kubernetes.io/projected/339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9-kube-api-access-t5ww6\") pod \"router-default-5444994796-kdrdj\" (UID: \"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9\") " pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.228354 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2fpb\" (UniqueName: \"kubernetes.io/projected/fbfbaeff-08b6-47b1-b356-bf366cb0ed75-kube-api-access-n2fpb\") pod \"openshift-apiserver-operator-796bbdcf4f-gsjr2\" (UID: \"fbfbaeff-08b6-47b1-b356-bf366cb0ed75\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.238194 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-sfvwc" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.259167 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgq8t\" (UniqueName: \"kubernetes.io/projected/1ad4e395-06d2-483c-a847-16df8df7ec3c-kube-api-access-bgq8t\") pod \"service-ca-9c57cc56f-hntgm\" (UID: \"1ad4e395-06d2-483c-a847-16df8df7ec3c\") " pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.274019 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.274879 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m9jl\" (UniqueName: \"kubernetes.io/projected/ea3e40db-79b6-4499-b7e6-71bd46c55663-kube-api-access-2m9jl\") pod \"collect-profiles-29322105-br8qr\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.274976 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.774959179 +0000 UTC m=+143.518935377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.275170 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.275591 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.775581625 +0000 UTC m=+143.519557833 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.294996 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cwm7\" (UniqueName: \"kubernetes.io/projected/7058af57-9197-463d-8171-d1be29016903-kube-api-access-6cwm7\") pod \"kube-storage-version-migrator-operator-b67b599dd-n452f\" (UID: \"7058af57-9197-463d-8171-d1be29016903\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.307383 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh"] Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.311181 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9djv\" (UniqueName: \"kubernetes.io/projected/61dd9b83-5164-4f9d-b3b3-2599072d6593-kube-api-access-h9djv\") pod \"ingress-canary-kwml2\" (UID: \"61dd9b83-5164-4f9d-b3b3-2599072d6593\") " pod="openshift-ingress-canary/ingress-canary-kwml2" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.336151 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbz86\" (UniqueName: \"kubernetes.io/projected/c910e340-5d0a-4d6d-a570-3c1a5d82231b-kube-api-access-lbz86\") pod \"csi-hostpathplugin-5fjl6\" (UID: \"c910e340-5d0a-4d6d-a570-3c1a5d82231b\") " pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.347750 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9"] Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.373417 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.376570 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.376939 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.87692314 +0000 UTC m=+143.620899348 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.409157 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"] Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.440894 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.452459 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.454442 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.462066 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.470481 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.475159 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.477440 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.477741 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:00.977726901 +0000 UTC m=+143.721703109 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.481954 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hntgm"
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.500356    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5llhc"]
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.500403    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh"]
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.501281    4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2"
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.528442    4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-kwml2"
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.568579    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8vpb7"]
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.579837    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.580330    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.080309867 +0000 UTC m=+143.824286075 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.584924    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" podStartSLOduration=121.584904917 podStartE2EDuration="2m1.584904917s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:00.578275575 +0000 UTC m=+143.322251783" watchObservedRunningTime="2025-10-01 13:47:00.584904917 +0000 UTC m=+143.328881125"
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.647158    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6"]
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.680980    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.681307    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.181292123 +0000 UTC m=+143.925268321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.782753    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.783156    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.283129631 +0000 UTC m=+144.027105839 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.783563    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.784009    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.283998553 +0000 UTC m=+144.027974761 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.887722    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz"]
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.892322    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:00 crc kubenswrapper[4605]: E1001 13:47:00.892883    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.392866303 +0000 UTC m=+144.136842511 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.902563    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" event={"ID":"33744436-5acf-4628-bde8-81e0b2029f1d","Type":"ContainerStarted","Data":"b54902b82b5e33dc4bfb51d80a76d80500f7d4937842744ee15768cc4056f674"}
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.911638    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" event={"ID":"762e836a-1722-4e01-982d-023b84748aa4","Type":"ContainerStarted","Data":"1f4904fcd298f2261c9e9e743081d4903c5ff50bbd17aa48a6bcf3ce55de26bb"}
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.912513    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh"]
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.926480    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" event={"ID":"c55cb7b3-eaaf-486b-940e-d6ffd06ce8da","Type":"ContainerStarted","Data":"4601273db687a208ad4d17c755fa1ba9ed6d7908e09f2af380b474c92b284cca"}
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.926565    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2bkjp"
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.963662    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" event={"ID":"e7df6d80-4ed7-4192-b079-bd0119903e10","Type":"ContainerStarted","Data":"23acb259f0a97df8602734d74973a0de6fa828c10726791d67d56e89648b9caa"}
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.964246    4605 patch_prober.go:28] interesting pod/console-operator-58897d9998-2bkjp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.964296    4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" podUID="c55cb7b3-eaaf-486b-940e-d6ffd06ce8da" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.981455    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" event={"ID":"04d0121b-ec20-4607-9a6b-89ccaa4a4d57","Type":"ContainerStarted","Data":"700bd345685af74246ce26d13dcffdb8a14168440b52b04babb00e9cf0025fce"}
Oct 01 13:47:00 crc kubenswrapper[4605]: I1001 13:47:00.982968    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" event={"ID":"c0c81d24-bbca-4bc9-af4c-48f18da95147","Type":"ContainerStarted","Data":"a0dbe73f64aa9c7739f6c1870e3b2ca885fbba3be18776204e6f3d55f16e79e4"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:00.998143    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:00.999836    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.499819283 +0000 UTC m=+144.243795491 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.010576    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" event={"ID":"6334be24-ba19-4acc-afdf-74a88a104fce","Type":"ContainerStarted","Data":"1a374d1819dd9810ab896ee159390e5f901892a0181713606154838925f51557"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.023898    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2brwf" event={"ID":"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb","Type":"ContainerStarted","Data":"8b1c49b4f8ef06803e7667c271185bf17107f81dbc088cfc8a3aac43d4dc6b5e"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.028800    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" event={"ID":"161a8f41-2f6c-49d4-9efe-4d27a50ed622","Type":"ContainerStarted","Data":"92148f30d0f178f33d18241199c4754315e99e3fd6f60fbe97ff40a83d7408ca"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.049779    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" event={"ID":"9b2768b9-4054-4f67-a937-2050eebe9c2f","Type":"ContainerStarted","Data":"b342670dbb937991a60649d1ded2f3222de6c1e544f79857d14d38b77b99aeb5"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.067060    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn"]
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.069479    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-48sxr" event={"ID":"0c93dc9b-fba0-4d28-b8b3-8def5f66d466","Type":"ContainerStarted","Data":"ea54fb8f54e645982727905ee38d2126bb8a9d61710686f564845f6f2b237ee3"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.079829    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" event={"ID":"4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1","Type":"ContainerStarted","Data":"3b1f9b8722269ff0cedb4dd582bd3b80afe94eb10b7f1df7e8d8926860a716df"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.094226    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" event={"ID":"f21a4520-2f55-4e46-9243-287aa9ae7189","Type":"ContainerStarted","Data":"497adeb3fdf43261ac10d26debc8eb5988fa9ea10ede76686db3a0c79dc832e2"}
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.103517    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.104026    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.604006751 +0000 UTC m=+144.347982959 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.205591    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.209189    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.709173455 +0000 UTC m=+144.453149653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.244714    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9h8zv"]
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.275431    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zv9xk" podStartSLOduration=122.275417321 podStartE2EDuration="2m2.275417321s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:01.274698212 +0000 UTC m=+144.018674420" watchObservedRunningTime="2025-10-01 13:47:01.275417321 +0000 UTC m=+144.019393529"
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.311437    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.311728    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.811711981 +0000 UTC m=+144.555688189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.374534    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-w9b29"]
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.415467    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.415859    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:01.915844468 +0000 UTC m=+144.659820676 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.517646    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.518552    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.018533558 +0000 UTC m=+144.762509766 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.622514    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.623027    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.123013434 +0000 UTC m=+144.866989642 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.641608    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" podStartSLOduration=121.641590135 podStartE2EDuration="2m1.641590135s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:01.640786514 +0000 UTC m=+144.384762722" watchObservedRunningTime="2025-10-01 13:47:01.641590135 +0000 UTC m=+144.385566343"
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.724068    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.724593    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.224575014 +0000 UTC m=+144.968551222 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.744805    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg"]
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.825213    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.825552    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.325540549 +0000 UTC m=+145.069516757 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.927652    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:01 crc kubenswrapper[4605]: E1001 13:47:01.928068    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.428040144 +0000 UTC m=+145.172016352 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:01 crc kubenswrapper[4605]: I1001 13:47:01.966856    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.019854    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.031708    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.032167    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.5321545 +0000 UTC m=+145.276130708 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.046916    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5fjl6"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.127925    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-sfvwc" event={"ID":"c876f01f-3cca-49fd-98f1-3232fd165176","Type":"ContainerStarted","Data":"f55b40e4d3e539b43d448d050521c40772b9a7527a60ef8e3e05415641099ace"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.134917    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.135352    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.635333393 +0000 UTC m=+145.379309601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.135954    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" podStartSLOduration=123.135943579 podStartE2EDuration="2m3.135943579s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:02.135511167 +0000 UTC m=+144.879487375" watchObservedRunningTime="2025-10-01 13:47:02.135943579 +0000 UTC m=+144.879919777"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.150284    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" event={"ID":"f21a4520-2f55-4e46-9243-287aa9ae7189","Type":"ContainerStarted","Data":"d4f6288c0415ac3e7ae0ef9ecbd869bbd466174bdf8041b5dc5374cbc01e0522"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.152821    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" event={"ID":"263bef0b-5d5b-40e6-ac60-ff6c4d4217af","Type":"ContainerStarted","Data":"92256d92a2d880ca96bb6596ac18b161315f45509bf2296e390f8f656e512c3e"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.163958    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" event={"ID":"7f6de1d9-61b5-4cc6-a820-5492052b60ef","Type":"ContainerStarted","Data":"c887456e4902033c6f73330006a16d65ecd4b64d7f8f191c2772990db88e0747"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.164902    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.180351    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6pn8g" podStartSLOduration=123.180336658 podStartE2EDuration="2m3.180336658s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:02.17770704 +0000 UTC m=+144.921683248" watchObservedRunningTime="2025-10-01 13:47:02.180336658 +0000 UTC m=+144.924312866"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.181312    4605 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-trd6j container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body=
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.181359    4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.201935    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" event={"ID":"6a1bba23-826a-4f52-8c53-f7364363a6f7","Type":"ContainerStarted","Data":"7dc4fea803ad621f01c172d2357f8c3917010edc025cf63524d3899661ebc845"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.225298    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" event={"ID":"27c52669-b56d-45ea-a605-015e92a313e6","Type":"ContainerStarted","Data":"c2422a1ce14f92c50d40df5bfecc5b8fa6322ea9820fb9aafe2c4197c5dee219"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.230953    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8mkj9" podStartSLOduration=123.230936499 podStartE2EDuration="2m3.230936499s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:02.228061964 +0000 UTC m=+144.972038172" watchObservedRunningTime="2025-10-01 13:47:02.230936499 +0000 UTC m=+144.974912707"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.231223    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.240667    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.244266    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.246725    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.746712068 +0000 UTC m=+145.490688276 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.297247    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" event={"ID":"ebc0f2e4-730c-4957-bf35-df11f426f04e","Type":"ContainerStarted","Data":"990128b7a9ffa3ed01afae05a44fe82a6e68d50c6c5e1b9b50b583fc1487b951"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.303177    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" event={"ID":"ee291562-056e-4ca5-b326-a412ba18d5db","Type":"ContainerStarted","Data":"ad2cd77dfbc72b4b4362782b87e7b6adc16174411b426796c0f057a6bd053ef4"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.312043    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.329272    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9h8zv" event={"ID":"da66068e-3e86-487b-9a81-e4edd65c71cb","Type":"ContainerStarted","Data":"86d2e5353ddbca02edefaf6bb9960cf91caee9e7f4d52cf0292d126bb65da6ca"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.345733    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.353531    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.853501683 +0000 UTC m=+145.597477891 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.388727    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" event={"ID":"82715fa2-9367-44b6-a3b4-54b01d4865f1","Type":"ContainerStarted","Data":"ecc23f3b01cb57800ace1b8d3789fe4469f0665923d24a4f30c578ff71477291"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.393595    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-48sxr"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.411888    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-kdrdj" event={"ID":"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9","Type":"ContainerStarted","Data":"59e8a3a6b90745d9a4727205b5a54179505381d7fc288c7c83fcbe356c1b16ed"}
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.412909    4605 patch_prober.go:28] interesting pod/console-operator-58897d9998-2bkjp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.412941    4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" podUID="c55cb7b3-eaaf-486b-940e-d6ffd06ce8da" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.419572    4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.419605    4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.447507    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" podStartSLOduration=123.447492558 podStartE2EDuration="2m3.447492558s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:02.305753447 +0000 UTC m=+145.049729645" watchObservedRunningTime="2025-10-01 13:47:02.447492558 +0000 UTC m=+145.191468766"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.453071    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.453432    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:02.953422201 +0000 UTC m=+145.697398409 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.472296    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-48sxr" podStartSLOduration=123.47227058 podStartE2EDuration="2m3.47227058s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:02.446616735 +0000 UTC m=+145.190592943" watchObservedRunningTime="2025-10-01 13:47:02.47227058 +0000 UTC m=+145.216246788"
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.497685    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.545073    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.556658    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.557846    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.057829646 +0000 UTC m=+145.801805854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: W1001 13:47:02.618584    4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf455f5fd_4c5c_487c_9d5a_f86a5c16ae20.slice/crio-b5e811b56970900fbd34fd294dc6408b807b7694235bd2a841db31e8cc958ac7 WatchSource:0}: Error finding container b5e811b56970900fbd34fd294dc6408b807b7694235bd2a841db31e8cc958ac7: Status 404 returned error can't find the container with id b5e811b56970900fbd34fd294dc6408b807b7694235bd2a841db31e8cc958ac7
Oct 01 13:47:02 crc kubenswrapper[4605]: W1001 13:47:02.639261    4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd71c176f_21e9_4ec4_ba8f_ae4a58649b73.slice/crio-e327868bab97fb882ab1dbbec7aac7d8dd43d97af34de9afd1cb54d2a785d843 WatchSource:0}: Error finding container e327868bab97fb882ab1dbbec7aac7d8dd43d97af34de9afd1cb54d2a785d843: Status 404 returned error can't find the container with id e327868bab97fb882ab1dbbec7aac7d8dd43d97af34de9afd1cb54d2a785d843
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.660989    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.661509    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.16149573 +0000 UTC m=+145.905471938 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.673007    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.762478    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.765424    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.265403532 +0000 UTC m=+146.009379740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.828966    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hntgm"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.866842    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.867455    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.367431154 +0000 UTC m=+146.111407362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.913788    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.952557    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.970964    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f"]
Oct 01 13:47:02 crc kubenswrapper[4605]: I1001 13:47:02.972550    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:02 crc kubenswrapper[4605]: E1001 13:47:02.972881    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.472864705 +0000 UTC m=+146.216840913 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.024948    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hv7vm"]
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.026341    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-kwml2"]
Oct 01 13:47:03 crc kubenswrapper[4605]: W1001 13:47:03.045658    4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7058af57_9197_463d_8171_d1be29016903.slice/crio-b8d2b1641865ba8920a3bee0251909a65d4dc9da53ec24d1662449450129d805 WatchSource:0}: Error finding container b8d2b1641865ba8920a3bee0251909a65d4dc9da53ec24d1662449450129d805: Status 404 returned error can't find the container with id b8d2b1641865ba8920a3bee0251909a65d4dc9da53ec24d1662449450129d805
Oct 01 13:47:03 crc kubenswrapper[4605]: W1001 13:47:03.061586    4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9317edb7_8cf8_4045_95a9_196349d21055.slice/crio-5d075d7a237a127cea87374eed17f67c54cf153d8e443cc6ecdfcb7c1a4da08d WatchSource:0}: Error finding container 5d075d7a237a127cea87374eed17f67c54cf153d8e443cc6ecdfcb7c1a4da08d: Status 404 returned error can't find the container with id 5d075d7a237a127cea87374eed17f67c54cf153d8e443cc6ecdfcb7c1a4da08d
Oct 01 13:47:03 crc kubenswrapper[4605]: W1001 13:47:03.072743    4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61dd9b83_5164_4f9d_b3b3_2599072d6593.slice/crio-779660398e94587202b2939084101f648275a86f179c4610a084555d53521bb0 WatchSource:0}: Error finding container 779660398e94587202b2939084101f648275a86f179c4610a084555d53521bb0: Status 404 returned error can't find the container with id 779660398e94587202b2939084101f648275a86f179c4610a084555d53521bb0
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.074409    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.074908    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.574895658 +0000 UTC m=+146.318871866 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.175488    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.175645    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.675620256 +0000 UTC m=+146.419596464 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.176167    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.176517    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.676506489 +0000 UTC m=+146.420482697 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.277257    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.277605    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.777586987 +0000 UTC m=+146.521563195 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.378567    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.378872    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.87886078 +0000 UTC m=+146.622836988 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.428049    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" event={"ID":"c910e340-5d0a-4d6d-a570-3c1a5d82231b","Type":"ContainerStarted","Data":"bf48c8c12275f9edcedeb2a5bfc7f6809a1a8c12a7b51b330b80832e395f35b0"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.430643    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" event={"ID":"c0c81d24-bbca-4bc9-af4c-48f18da95147","Type":"ContainerStarted","Data":"83b0e7555dea236b17d9d8c4fff4afa570aa018c16a181f260f4c9d7414e0dc9"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.435484    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2brwf" event={"ID":"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb","Type":"ContainerStarted","Data":"884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.436604    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" event={"ID":"7058af57-9197-463d-8171-d1be29016903","Type":"ContainerStarted","Data":"b8d2b1641865ba8920a3bee0251909a65d4dc9da53ec24d1662449450129d805"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.438531    4605 generic.go:334] "Generic (PLEG): container finished" podID="04d0121b-ec20-4607-9a6b-89ccaa4a4d57" containerID="42e23471d1f6772e37e3c0cbbadcee4f4359b063b12d944bf648b8f9decabaee" exitCode=0
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.438570    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" event={"ID":"04d0121b-ec20-4607-9a6b-89ccaa4a4d57","Type":"ContainerDied","Data":"42e23471d1f6772e37e3c0cbbadcee4f4359b063b12d944bf648b8f9decabaee"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.441746    4605 generic.go:334] "Generic (PLEG): container finished" podID="e7df6d80-4ed7-4192-b079-bd0119903e10" containerID="3965b04be6dde15d1471bd4d7234565fb96ef5aa48ec62cc97af59cad039f1f5" exitCode=0
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.441806    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" event={"ID":"e7df6d80-4ed7-4192-b079-bd0119903e10","Type":"ContainerDied","Data":"3965b04be6dde15d1471bd4d7234565fb96ef5aa48ec62cc97af59cad039f1f5"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.448228    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kwml2" event={"ID":"61dd9b83-5164-4f9d-b3b3-2599072d6593","Type":"ContainerStarted","Data":"779660398e94587202b2939084101f648275a86f179c4610a084555d53521bb0"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.453515    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9v5mh" podStartSLOduration=123.453478313 podStartE2EDuration="2m3.453478313s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:03.451700867 +0000 UTC m=+146.195677075" watchObservedRunningTime="2025-10-01 13:47:03.453478313 +0000 UTC m=+146.197454541"
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.457160    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" event={"ID":"238e77bf-d5db-463a-91b7-b6b8be0d642b","Type":"ContainerStarted","Data":"074519c23ec386a462b64f56d0c32540680dbd2b3b049959a66cf32c4655bdaf"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.463845    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" event={"ID":"762e836a-1722-4e01-982d-023b84748aa4","Type":"ContainerStarted","Data":"f0c9f7154850a1b7d209509ffd87bf4f742aaaf889203715c0b3ff67ef11f74f"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.470734    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" event={"ID":"82715fa2-9367-44b6-a3b4-54b01d4865f1","Type":"ContainerStarted","Data":"5b3a6837435517a7e4cd663d41f897336e0de6e435818a247770e65293034fc6"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.478764    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-sfvwc" event={"ID":"c876f01f-3cca-49fd-98f1-3232fd165176","Type":"ContainerStarted","Data":"ce7cb5f93c3d2765ddf66c3c4290841b88de46301a92825492ccedb232f69560"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.480351    4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.480526    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2brwf" podStartSLOduration=124.480505133 podStartE2EDuration="2m4.480505133s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:03.47998611 +0000 UTC m=+146.223962318" watchObservedRunningTime="2025-10-01 13:47:03.480505133 +0000 UTC m=+146.224481341"
Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.480811    4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:03.98078706 +0000 UTC m=+146.724763268 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.516623    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" event={"ID":"2517f8ad-404f-470e-a785-c4c90c2cdc0d","Type":"ContainerStarted","Data":"3c497b8e7ea039f122579901fcf98eccb91f78881d50d38642c0372754b0be8d"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.523292    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" event={"ID":"d71c176f-21e9-4ec4-ba8f-ae4a58649b73","Type":"ContainerStarted","Data":"e327868bab97fb882ab1dbbec7aac7d8dd43d97af34de9afd1cb54d2a785d843"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.545054    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" event={"ID":"ea3e40db-79b6-4499-b7e6-71bd46c55663","Type":"ContainerStarted","Data":"edafa7bbe010f55576837f8b98c55515e20cd51478079aa696d8b2b3b68a968d"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.555551    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" event={"ID":"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20","Type":"ContainerStarted","Data":"b5e811b56970900fbd34fd294dc6408b807b7694235bd2a841db31e8cc958ac7"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.556722    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" event={"ID":"1ad4e395-06d2-483c-a847-16df8df7ec3c","Type":"ContainerStarted","Data":"ed069545411b9107c18e5dfe2895c2798394d8e0885c90194aa01b7327c6ac3c"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.557467    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" event={"ID":"c4622426-9b45-4026-ac8a-3390697075ef","Type":"ContainerStarted","Data":"81fdd019ff560e5b209dfec24a166da7ef1e60880d1b89801988b39e7189108a"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.558866    4605 generic.go:334] "Generic (PLEG): container finished" podID="9b2768b9-4054-4f67-a937-2050eebe9c2f" containerID="017f97631178409fb3acd591b6f05fdb489a4fc63b0eb0d18ff31f3b366ed949" exitCode=0
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.558909    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" event={"ID":"9b2768b9-4054-4f67-a937-2050eebe9c2f","Type":"ContainerDied","Data":"017f97631178409fb3acd591b6f05fdb489a4fc63b0eb0d18ff31f3b366ed949"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.560913    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" event={"ID":"fbfbaeff-08b6-47b1-b356-bf366cb0ed75","Type":"ContainerStarted","Data":"a6cb66ad29d418190223c82be4b37b8bec3b2954dd7b6e68b984ddab04072da4"}
Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.561757    4605 kubelet.go:2453]
"SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" event={"ID":"16842335-be0d-4f69-b0af-e98b21c572ab","Type":"ContainerStarted","Data":"c68fae9c8506604df3450d4d03db48a08da66f46d8e43639e38d7316d99b36f8"} Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.563086 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" event={"ID":"27c52669-b56d-45ea-a605-015e92a313e6","Type":"ContainerStarted","Data":"22004350ad30fbf434a22497d252bd73a8edf2d5be43c4a21ffc46cc465ebdc0"} Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.567192 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" event={"ID":"828f308f-0b92-4019-ab24-96477d0b6a47","Type":"ContainerStarted","Data":"ca2368cd8f0025dfbf061bb78dc846c03998fc285abf36ce8a45e5cd2765a67d"} Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.573428 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" event={"ID":"5271f9c4-f151-4e96-b831-93459666b524","Type":"ContainerStarted","Data":"62c03c5b8356bfa2c660218b778898708fe1ba48b3c6515ab3ed0498bb7da290"} Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.583028 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-48sxr" event={"ID":"0c93dc9b-fba0-4d28-b8b3-8def5f66d466","Type":"ContainerStarted","Data":"f1516e23a93bead988e4da895be70e1b0460974bd0a1de685428cce218a143f4"} Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.584709 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.584831 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.586075 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.587807 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" event={"ID":"9317edb7-8cf8-4045-95a9-196349d21055","Type":"ContainerStarted","Data":"5d075d7a237a127cea87374eed17f67c54cf153d8e443cc6ecdfcb7c1a4da08d"} Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.587918 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.087889093 +0000 UTC m=+146.831865301 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.588895 4605 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-trd6j container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body= Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.588955 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.691590 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.693257 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.193241732 +0000 UTC m=+146.937217940 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.793984 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.794455 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.294439123 +0000 UTC m=+147.038415331 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.895646 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.895741 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.395723176 +0000 UTC m=+147.139699384 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.896248 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.896588 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.396574338 +0000 UTC m=+147.140550546 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:03 crc kubenswrapper[4605]: I1001 13:47:03.997801 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:03 crc kubenswrapper[4605]: E1001 13:47:03.998169 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.498153239 +0000 UTC m=+147.242129447 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.101008 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.103549 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.603529718 +0000 UTC m=+147.347505926 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.203249 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.203723 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.703704143 +0000 UTC m=+147.447680351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.305022 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.305875 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.805862009 +0000 UTC m=+147.549838217 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.406302 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.407145 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:04.907124661 +0000 UTC m=+147.651100869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.509916 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.510318 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.010304014 +0000 UTC m=+147.754280222 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.611588 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.612449 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.112434079 +0000 UTC m=+147.856410287 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.625973 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" event={"ID":"ee291562-056e-4ca5-b326-a412ba18d5db","Type":"ContainerStarted","Data":"89f7f1b93250443c22c562556488b6ddc2ccceb289fabae37330da99db01c488"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.629761 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" event={"ID":"2517f8ad-404f-470e-a785-c4c90c2cdc0d","Type":"ContainerStarted","Data":"a0ecd836a42b1058fad1e4718510648014d3934ee5d6d68eed4e0c1a65261c1f"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.631295 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.633747 4605 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rx67q container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.633844 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" podUID="2517f8ad-404f-470e-a785-c4c90c2cdc0d" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.640425 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" 
event={"ID":"33744436-5acf-4628-bde8-81e0b2029f1d","Type":"ContainerStarted","Data":"ca768a6f38640c908f852d5e7534e4274eb4d5677097791c93d484e7aa119536"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.655509 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" event={"ID":"d71c176f-21e9-4ec4-ba8f-ae4a58649b73","Type":"ContainerStarted","Data":"05a8f324b2df361bb55173c0fe7408bc1115db53a045eb11d4c460a9c190fa07"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.655953 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" podStartSLOduration=124.655917165 podStartE2EDuration="2m4.655917165s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.65261557 +0000 UTC m=+147.396591768" watchObservedRunningTime="2025-10-01 13:47:04.655917165 +0000 UTC m=+147.399893373" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.663182 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" event={"ID":"1ad4e395-06d2-483c-a847-16df8df7ec3c","Type":"ContainerStarted","Data":"3b6c617bef68e89c2b5ab2e57ee9984bd4199ff93c14c9dbcae7c664f354ac3c"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.671952 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-5llhc" podStartSLOduration=125.67192743 podStartE2EDuration="2m5.67192743s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.669406175 +0000 UTC m=+147.413382383" watchObservedRunningTime="2025-10-01 13:47:04.67192743 +0000 UTC m=+147.415903638" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.683446 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" event={"ID":"e7df6d80-4ed7-4192-b079-bd0119903e10","Type":"ContainerStarted","Data":"0646fb84f4ab6192f93e2e4eeae56b84b3080275634b6941554cc80c06c742f9"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.694272 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" event={"ID":"16842335-be0d-4f69-b0af-e98b21c572ab","Type":"ContainerStarted","Data":"637d8327da6947ac37168e5b75c5c5afea04316881de7a820dd1686d0331d60d"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.713630 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" event={"ID":"ea3e40db-79b6-4499-b7e6-71bd46c55663","Type":"ContainerStarted","Data":"7541ddd5837ac92c9e1d93dfba42f821bf4ae79da8f3d4c3a1e40a4700fbd9e8"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.725839 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-hntgm" podStartSLOduration=124.725820006 podStartE2EDuration="2m4.725820006s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.725796475 +0000 UTC m=+147.469772683" 
watchObservedRunningTime="2025-10-01 13:47:04.725820006 +0000 UTC m=+147.469796224" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.726181 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.730496 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.230479616 +0000 UTC m=+147.974455824 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.732428 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" event={"ID":"7058af57-9197-463d-8171-d1be29016903","Type":"ContainerStarted","Data":"84727d07e7a8ca6693e38b02aa9d0d6a7848c0037a87e50c0a04b06d63f87ed2"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.768016 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" podStartSLOduration=124.767997488 podStartE2EDuration="2m4.767997488s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.767572297 +0000 UTC m=+147.511548505" watchObservedRunningTime="2025-10-01 13:47:04.767997488 +0000 UTC m=+147.511973696" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.825862 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n452f" podStartSLOduration=124.825844826 podStartE2EDuration="2m4.825844826s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.822851369 +0000 UTC m=+147.566827577" watchObservedRunningTime="2025-10-01 13:47:04.825844826 +0000 UTC m=+147.569821034" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.826926 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" event={"ID":"762e836a-1722-4e01-982d-023b84748aa4","Type":"ContainerStarted","Data":"40c480ca056ba9d2927e709c3ee0968e54eec5068ab1b0741e2667b6a2697085"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.827766 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.827813 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.327801407 +0000 UTC m=+148.071777615 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.832672 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.834159 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.334142541 +0000 UTC m=+148.078118749 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.849262 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" event={"ID":"04d0121b-ec20-4607-9a6b-89ccaa4a4d57","Type":"ContainerStarted","Data":"170aa14cfbcd111a69889108ed0843d3faaa1b5cbd2a4391f0640deee8fe6162"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.860165 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k22x" podStartSLOduration=124.860146225 podStartE2EDuration="2m4.860146225s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.857945768 +0000 UTC m=+147.601921986" watchObservedRunningTime="2025-10-01 13:47:04.860146225 +0000 UTC m=+147.604122433" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.873232 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" event={"ID":"9317edb7-8cf8-4045-95a9-196349d21055","Type":"ContainerStarted","Data":"8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.873790 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.893515 4605 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hv7vm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body= Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.894080 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.936139 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:04 crc kubenswrapper[4605]: E1001 13:47:04.937403 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.437387665 +0000 UTC m=+148.181363873 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.941856 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" event={"ID":"4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1","Type":"ContainerStarted","Data":"30fbc99fbd92496e59f972ef3256c80521215f6208d30c0b7fa3fcdad74050b9"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.954838 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" event={"ID":"9b2768b9-4054-4f67-a937-2050eebe9c2f","Type":"ContainerStarted","Data":"538cea2fba7f8b00344f9b31d58babdf94f3ea0fd5e9b6bff2f6b1d972142bd1"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.960169 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" podStartSLOduration=124.960149095 podStartE2EDuration="2m4.960149095s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.959119598 +0000 UTC m=+147.703095806" watchObservedRunningTime="2025-10-01 13:47:04.960149095 +0000 UTC m=+147.704125323" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.960318 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" podStartSLOduration=124.960312519 podStartE2EDuration="2m4.960312519s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:04.908682142 +0000 UTC m=+147.652658350" watchObservedRunningTime="2025-10-01 13:47:04.960312519 +0000 UTC m=+147.704288727" Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.978335 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" event={"ID":"fbfbaeff-08b6-47b1-b356-bf366cb0ed75","Type":"ContainerStarted","Data":"173bd7a195b94f787c28dd87e27d8f3ae4e5cae47012150bc77c9826016a9237"} Oct 01 13:47:04 crc kubenswrapper[4605]: I1001 13:47:04.982797 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" event={"ID":"238e77bf-d5db-463a-91b7-b6b8be0d642b","Type":"ContainerStarted","Data":"abb830052e7179b3bfc7a39f761ff348aa78a3101e750170edf4c55183cbd99a"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.002982 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" event={"ID":"5271f9c4-f151-4e96-b831-93459666b524","Type":"ContainerStarted","Data":"482134b5cd1443c8117255fea4ee362367c81abf5aab990499e6986129ba44c8"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.028692 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-kwml2" 
event={"ID":"61dd9b83-5164-4f9d-b3b3-2599072d6593","Type":"ContainerStarted","Data":"e383303422767bd25f63f10b73341815fcd0a4845ff9b65e34d6143bc240d3f0"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.043696 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.044513 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gsjr2" podStartSLOduration=125.04450095 podStartE2EDuration="2m5.04450095s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.011839324 +0000 UTC m=+147.755815532" watchObservedRunningTime="2025-10-01 13:47:05.04450095 +0000 UTC m=+147.788477158" Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.047789 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.547773724 +0000 UTC m=+148.291749932 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.057204 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" event={"ID":"6334be24-ba19-4acc-afdf-74a88a104fce","Type":"ContainerStarted","Data":"8e0d469bc516c8f1241b276695761d0ccd26c84638989488b953119bc7b913ea"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.063944 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" event={"ID":"c4622426-9b45-4026-ac8a-3390697075ef","Type":"ContainerStarted","Data":"bf25cc57708292b2c9a1d7f5f6d97e83df0e322c8f90d9d7a7dd2a316eb16119"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.072370 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-kdrdj" event={"ID":"339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9","Type":"ContainerStarted","Data":"996c3af173346bbd5d9fb1058bb4b56369696b7ff3b0ac0d96847a441c020a8e"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.083353 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-kwml2" podStartSLOduration=8.083331355 podStartE2EDuration="8.083331355s" podCreationTimestamp="2025-10-01 13:46:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.082323439 +0000 UTC m=+147.826299647" 
watchObservedRunningTime="2025-10-01 13:47:05.083331355 +0000 UTC m=+147.827307583" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.083897 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" event={"ID":"ebc0f2e4-730c-4957-bf35-df11f426f04e","Type":"ContainerStarted","Data":"ace6529f84ea6358be9fb469bca15c30866d635916ba15d04df73cc9aa72ff0a"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.083997 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hhbgt" podStartSLOduration=126.083990272 podStartE2EDuration="2m6.083990272s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.056204683 +0000 UTC m=+147.800180881" watchObservedRunningTime="2025-10-01 13:47:05.083990272 +0000 UTC m=+147.827966470" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.088578 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9h8zv" event={"ID":"da66068e-3e86-487b-9a81-e4edd65c71cb","Type":"ContainerStarted","Data":"a662e07ee699b7cea9eaa2c5fed4e5dcfa87112027aa7717e605a7e119ed4102"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.092575 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-9h8zv" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.122693 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" event={"ID":"6a1bba23-826a-4f52-8c53-f7364363a6f7","Type":"ContainerStarted","Data":"867a1506e9b42b2bb8f4dcee768fafd5000a8b3f1cec947b8bc7ec2bf606f37e"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.134849 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-kdrdj" podStartSLOduration=126.134833029 podStartE2EDuration="2m6.134833029s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.13291742 +0000 UTC m=+147.876893628" watchObservedRunningTime="2025-10-01 13:47:05.134833029 +0000 UTC m=+147.878809227" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.135499 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" event={"ID":"263bef0b-5d5b-40e6-ac60-ff6c4d4217af","Type":"ContainerStarted","Data":"b20479f85a686fb9301135ee7d010ffa0b18d0fdbf9d5af5656272d194af5636"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.136776 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.144130 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" event={"ID":"f455f5fd-4c5c-487c-9d5a-f86a5c16ae20","Type":"ContainerStarted","Data":"2e5194072b58658bd3a1af46111b2b6510ca3e8ec39aeb8584545d2e4dbef6e0"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.146282 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.147461 4605 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-dpknn container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused" start-of-body= Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.147512 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" podUID="263bef0b-5d5b-40e6-ac60-ff6c4d4217af" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused" Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.147627 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" event={"ID":"828f308f-0b92-4019-ab24-96477d0b6a47","Type":"ContainerStarted","Data":"1232ae0754b742702dc63749c13814d258e840e75a53deca42249e89cd60c4ec"} Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.147663 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.148066 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.648044981 +0000 UTC m=+148.392021199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.148455 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.151526 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.651510051 +0000 UTC m=+148.395486339 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.156634 4605 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-hq6j4 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.156680 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" podUID="828f308f-0b92-4019-ab24-96477d0b6a47" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.161702 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.161742 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.214602 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lhptc" podStartSLOduration=125.214585915 podStartE2EDuration="2m5.214585915s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.160309399 +0000 UTC m=+147.904285607" watchObservedRunningTime="2025-10-01 13:47:05.214585915 +0000 UTC m=+147.958562123"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.216048 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" podStartSLOduration=125.216044253 podStartE2EDuration="2m5.216044253s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.212059179 +0000 UTC m=+147.956035377" watchObservedRunningTime="2025-10-01 13:47:05.216044253 +0000 UTC m=+147.960020461"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.249671 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.249696 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-frtv2" podStartSLOduration=125.249669024 podStartE2EDuration="2m5.249669024s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.247511988 +0000 UTC m=+147.991488196" watchObservedRunningTime="2025-10-01 13:47:05.249669024 +0000 UTC m=+147.993645232"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.250721 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.75070437 +0000 UTC m=+148.494680578 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.286296 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-sfvwc" podStartSLOduration=8.286275712 podStartE2EDuration="8.286275712s" podCreationTimestamp="2025-10-01 13:46:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.285769409 +0000 UTC m=+148.029745617" watchObservedRunningTime="2025-10-01 13:47:05.286275712 +0000 UTC m=+148.030251920"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.354302 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.354662 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.854633482 +0000 UTC m=+148.598609690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.394746 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g59zg" podStartSLOduration=125.39472421 podStartE2EDuration="2m5.39472421s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.332715004 +0000 UTC m=+148.076691212" watchObservedRunningTime="2025-10-01 13:47:05.39472421 +0000 UTC m=+148.138700418"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.429535 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-9h8zv" podStartSLOduration=9.429510241 podStartE2EDuration="9.429510241s" podCreationTimestamp="2025-10-01 13:46:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.401306941 +0000 UTC m=+148.145283149" watchObservedRunningTime="2025-10-01 13:47:05.429510241 +0000 UTC m=+148.173486449"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.431201 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" podStartSLOduration=125.431191495 podStartE2EDuration="2m5.431191495s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.429493891 +0000 UTC m=+148.173470099" watchObservedRunningTime="2025-10-01 13:47:05.431191495 +0000 UTC m=+148.175167713"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.441540 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-kdrdj"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.444493 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.444576 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.456410 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
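The repeated "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" failures above all come from the same place: kubelet resolves a CSI driver by name against the set of node plugins that have registered over its plugin-registration socket, and until csi-hostpathplugin announces itself, every MountDevice/TearDownAt attempt for this PVC fails fast and is requeued. The following is a minimal illustrative sketch of that lookup pattern, not kubelet's actual code; the type and function names (DriverRegistry, NewClient) are invented for the example.

package main

import (
	"fmt"
	"sync"
)

// DriverRegistry is a hypothetical stand-in for kubelet's table of CSI
// drivers that have completed plugin registration on this node.
type DriverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> node plugin socket path
}

func (r *DriverRegistry) Register(name, endpoint string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = endpoint
}

// NewClient fails the way the log shows when the driver's node plugin
// has not (yet) registered itself with kubelet.
func (r *DriverRegistry) NewClient(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	endpoint, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return endpoint, nil
}

func main() {
	reg := &DriverRegistry{drivers: map[string]string{}}
	// Before the hostpath plugin pod is up: mirrors the MountDevice errors above.
	if _, err := reg.NewClient("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("Error:", err)
	}
	// Once the plugin registers (socket path here is illustrative), retries succeed.
	reg.Register("kubevirt.io.hostpath-provisioner", "/var/lib/kubelet/plugins/csi-hostpath/csi.sock")
	endpoint, _ := reg.NewClient("kubevirt.io.hostpath-provisioner")
	fmt.Println("resolved endpoint:", endpoint)
}

Consistent with this reading, the csi-hostpathplugin-5fjl6 pod in the hostpath-provisioner namespace only reports ContainerStarted further down in this log, after which the mount retries would stop failing.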
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.456621 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.956581823 +0000 UTC m=+148.700558041 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.456816 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.457245 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:05.95723574 +0000 UTC m=+148.701211948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.474215 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" podStartSLOduration=126.474191859 podStartE2EDuration="2m6.474191859s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:05.473033249 +0000 UTC m=+148.217009467" watchObservedRunningTime="2025-10-01 13:47:05.474191859 +0000 UTC m=+148.218168067"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.558364 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.558546 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.058516893 +0000 UTC m=+148.802493091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.558866 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.559370 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.059350524 +0000 UTC m=+148.803326742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.659916 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.660179 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.160146115 +0000 UTC m=+148.904122323 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.660738 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.661033 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.161023388 +0000 UTC m=+148.904999596 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.761473 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.761891 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.261861189 +0000 UTC m=+149.005837397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.862693 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.863188 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.863239 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.863274 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.863302 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.864312 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.364276762 +0000 UTC m=+149.108253000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.865291 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.880741 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.883471 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.886300 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.964309 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
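The "No retries permitted until ... (durationBeforeRetry 500ms)" lines record kubelet's nestedpendingoperations gate: after a failed mount or unmount, the operation stores an earliest-retry time, and reconciler attempts before that deadline are rejected rather than re-executed. A minimal illustrative sketch of that gate follows; the pendingOp type is invented for the example, and the fixed 500ms window simply matches what this log shows (kubelet's real backoff policy can grow beyond that).

package main

import (
	"fmt"
	"time"
)

// pendingOp is a hypothetical stand-in for one entry in kubelet's
// nested-pending-operations table.
type pendingOp struct {
	retryAfter time.Time
}

// tryRun rejects attempts that arrive before the earliest-retry time;
// otherwise it runs the operation and, on failure, schedules the next
// attempt 500ms out, as in the log lines above.
func (op *pendingOp) tryRun(now time.Time, run func() error) error {
	if now.Before(op.retryAfter) {
		return fmt.Errorf("no retries permitted until %s", op.retryAfter.Format(time.RFC3339Nano))
	}
	if err := run(); err != nil {
		op.retryAfter = now.Add(500 * time.Millisecond) // durationBeforeRetry
		return err
	}
	return nil
}

func main() {
	op := &pendingOp{}
	mount := func() error { return fmt.Errorf("driver not registered") }
	fmt.Println(op.tryRun(time.Now(), mount)) // runs and fails; schedules a retry window
	fmt.Println(op.tryRun(time.Now(), mount)) // rejected: still inside the 500ms window
	time.Sleep(600 * time.Millisecond)
	fmt.Println(op.tryRun(time.Now(), mount)) // window elapsed: the operation runs (and fails) again
}

This is why the same volume alternates between "operationExecutor.UnmountVolume started" / "operationExecutor.MountVolume started" lines and the corresponding errors at roughly half-second intervals throughout this stretch of the log.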
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.964437 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.464413646 +0000 UTC m=+149.208389854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:05 crc kubenswrapper[4605]: I1001 13:47:05.964695 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:05 crc kubenswrapper[4605]: E1001 13:47:05.965020 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.465013091 +0000 UTC m=+149.208989299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.065485 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.065760 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.56574646 +0000 UTC m=+149.309722668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.139949 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.147398 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.153851 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.154435 4605 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-trd6j container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.154476 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.167915 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.168298 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.668285616 +0000 UTC m=+149.412261824 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.170184 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" event={"ID":"238e77bf-d5db-463a-91b7-b6b8be0d642b","Type":"ContainerStarted","Data":"bca0c6bdc4f0892cc0f863ee1b041df1d5389308927b447587b70aba5f14f0d0"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.175020 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" event={"ID":"d71c176f-21e9-4ec4-ba8f-ae4a58649b73","Type":"ContainerStarted","Data":"aef6ff980dedeb6011ee227c4f8472125624f205789712d809cccd8b1b99addf"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.178672 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" event={"ID":"c910e340-5d0a-4d6d-a570-3c1a5d82231b","Type":"ContainerStarted","Data":"8c547478be14f54113ca790749da1793d936c42ba04505fc45e11abeaf0c41b7"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.196472 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6crh" event={"ID":"6a1bba23-826a-4f52-8c53-f7364363a6f7","Type":"ContainerStarted","Data":"bc50a5cbe80108ab16826600aaad655ba0bea4cf61d64b4a2db57345fdb05d29"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.213904 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" event={"ID":"27c52669-b56d-45ea-a605-015e92a313e6","Type":"ContainerStarted","Data":"bba0a6c18e0495e1f80a086dbb496cb1bafd080878b0c967e7448656f19f0f4f"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.214735 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.230695 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-qqwf9" podStartSLOduration=126.230679472 podStartE2EDuration="2m6.230679472s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.229230554 +0000 UTC m=+148.973206762" watchObservedRunningTime="2025-10-01 13:47:06.230679472 +0000 UTC m=+148.974655680"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.231257 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" event={"ID":"82715fa2-9367-44b6-a3b4-54b01d4865f1","Type":"ContainerStarted","Data":"90c0f6f71582e42afe5f36446cce95efb882a3ec98cfc3010f94232f23c29256"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.247217 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" event={"ID":"9b2768b9-4054-4f67-a937-2050eebe9c2f","Type":"ContainerStarted","Data":"6e3b2fcabe4d48073e84e4c6039d219528a28847270316d2eb194ce40ebd738d"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.260992 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" event={"ID":"ee291562-056e-4ca5-b326-a412ba18d5db","Type":"ContainerStarted","Data":"84013d54d1784163032187ae4f7b18b89a4db8dcf433b4e1fe609caf8926edf8"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.269458 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.270892 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.770875363 +0000 UTC m=+149.514851571 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.288743 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9h8zv" event={"ID":"da66068e-3e86-487b-9a81-e4edd65c71cb","Type":"ContainerStarted","Data":"7a262c655b9aa2a88eea0ebceb6c0fe5a66901daace57289efb3cae9123d772a"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.290529 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" podStartSLOduration=126.290511782 podStartE2EDuration="2m6.290511782s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.287966606 +0000 UTC m=+149.031942814" watchObservedRunningTime="2025-10-01 13:47:06.290511782 +0000 UTC m=+149.034487990"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.338346 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" event={"ID":"4f8a1899-74f1-4a04-b5e8-2b8a3b60ddd1","Type":"ContainerStarted","Data":"f4191634c4f7435da4d12ad7b71a4202df473f0f4b8b5d8adcb2453fdcdf9f05"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.371710 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.372777 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.872765462 +0000 UTC m=+149.616741670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.374399 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" event={"ID":"6334be24-ba19-4acc-afdf-74a88a104fce","Type":"ContainerStarted","Data":"fcdde259f0d367ef5bbe6df018aaf10d508524ffcec815377f01ed05d09c1fbe"}
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375021 4605 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-hq6j4 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375056 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4" podUID="828f308f-0b92-4019-ab24-96477d0b6a47" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375360 4605 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hv7vm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body=
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375386 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375462 4605 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-dpknn container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused" start-of-body=
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375486 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" podUID="263bef0b-5d5b-40e6-ac60-ff6c4d4217af" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": dial tcp 10.217.0.22:5443: connect: connection refused"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375596 4605 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rx67q container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body=
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.375614 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q" podUID="2517f8ad-404f-470e-a785-c4c90c2cdc0d" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.388405 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-w9b29" podStartSLOduration=126.388387237 podStartE2EDuration="2m6.388387237s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.378983803 +0000 UTC m=+149.122960011" watchObservedRunningTime="2025-10-01 13:47:06.388387237 +0000 UTC m=+149.132363445"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.389428 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-cpppf" podStartSLOduration=126.389422543 podStartE2EDuration="2m6.389422543s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.322332376 +0000 UTC m=+149.066308604" watchObservedRunningTime="2025-10-01 13:47:06.389422543 +0000 UTC m=+149.133398751"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.445486 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" podStartSLOduration=126.445467465 podStartE2EDuration="2m6.445467465s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.444385477 +0000 UTC m=+149.188361695" watchObservedRunningTime="2025-10-01 13:47:06.445467465 +0000 UTC m=+149.189443673"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.454958 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:06 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:06 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:06 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.455017 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.472926 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
nodeName:}" failed. No retries permitted until 2025-10-01 13:47:06.974916398 +0000 UTC m=+149.718892606 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.532802 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2bmnh" podStartSLOduration=127.532781476 podStartE2EDuration="2m7.532781476s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.474702002 +0000 UTC m=+149.218678210" watchObservedRunningTime="2025-10-01 13:47:06.532781476 +0000 UTC m=+149.276757684" Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.534820 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" podStartSLOduration=127.534808979 podStartE2EDuration="2m7.534808979s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.534522231 +0000 UTC m=+149.278498439" watchObservedRunningTime="2025-10-01 13:47:06.534808979 +0000 UTC m=+149.278785187" Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.574951 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.575266 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.075254436 +0000 UTC m=+149.819230634 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.600010 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkzl6" podStartSLOduration=126.599991157 podStartE2EDuration="2m6.599991157s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.591638681 +0000 UTC m=+149.335614889" watchObservedRunningTime="2025-10-01 13:47:06.599991157 +0000 UTC m=+149.343967365" Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.675871 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.676017 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.175985275 +0000 UTC m=+149.919961493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.676242 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.676533 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.176521239 +0000 UTC m=+149.920497447 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.704726 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8vpb7" podStartSLOduration=127.704707779 podStartE2EDuration="2m7.704707779s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.692812791 +0000 UTC m=+149.436788999" watchObservedRunningTime="2025-10-01 13:47:06.704707779 +0000 UTC m=+149.448683987" Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.781652 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.781947 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.281932459 +0000 UTC m=+150.025908667 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.886455 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.886834 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.386815516 +0000 UTC m=+150.130791724 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.988691 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.989082 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.489061994 +0000 UTC m=+150.233038202 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:06 crc kubenswrapper[4605]: I1001 13:47:06.989564 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:06 crc kubenswrapper[4605]: E1001 13:47:06.989925 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.489913206 +0000 UTC m=+150.233889414 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.040477 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.093452 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.094085 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.594065803 +0000 UTC m=+150.338042021 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.195731 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.196004 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.695990403 +0000 UTC m=+150.439966611 (durationBeforeRetry 500ms). 
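The "Observed pod startup duration" entries are simple arithmetic over the fields they print: the reported duration is the observed running time minus podCreationTimestamp (with image-pull time accounted for separately; every entry here shows zero pull time, so SLO and E2E durations coincide). A small sketch of that computation, using the kube-controller-manager-operator entry from earlier in this log; the timestamp layout string is an assumption matching how the values are printed here.

package main

import (
	"fmt"
	"time"
)

// Layout matching the timestamps as they appear in the log entries above.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	// Values copied from the kube-controller-manager-operator entry.
	created, err := time.Parse(layout, "2025-10-01 13:45:00 +0000 UTC")
	if err != nil {
		panic(err)
	}
	running, err := time.Parse(layout, "2025-10-01 13:47:05.214585915 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 2m5.214585915s, matching podStartE2EDuration="2m5.214585915s"
	// and podStartSLOduration=125.214585915 in the log.
	fmt.Println(running.Sub(created))
}

The "0001-01-01 00:00:00 +0000 UTC" pull timestamps are Go's zero time, i.e. no image pull was observed for these long-created pods.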
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.196004 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.695990403 +0000 UTC m=+150.439966611 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.287018 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pwm5c" podStartSLOduration=127.28700154 podStartE2EDuration="2m7.28700154s" podCreationTimestamp="2025-10-01 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:06.754973501 +0000 UTC m=+149.498949719" watchObservedRunningTime="2025-10-01 13:47:07.28700154 +0000 UTC m=+150.030977748"
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.296748 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.297168 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.797150993 +0000 UTC m=+150.541127201 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.297338 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.297699 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.797691047 +0000 UTC m=+150.541667255 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.383304 4605 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hv7vm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused" start-of-body=
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.383436 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.42:8080/healthz\": dial tcp 10.217.0.42:8080: connect: connection refused"
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.404597 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.404876 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:07.904862702 +0000 UTC m=+150.648838910 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.411496 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rx67q"
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.448294 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:07 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:07 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:07 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.448345 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.506028 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.510207 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.01019185 +0000 UTC m=+150.754168118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.612715 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.613074 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.113059355 +0000 UTC m=+150.857035553 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.713999 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.714334 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.214322737 +0000 UTC m=+150.958298935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.815960 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.816221 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.316207486 +0000 UTC m=+151.060183694 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.816361 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.816687 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.316680488 +0000 UTC m=+151.060656686 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.917223 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.917412 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.417379657 +0000 UTC m=+151.161355865 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:07 crc kubenswrapper[4605]: I1001 13:47:07.917622 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:07 crc kubenswrapper[4605]: E1001 13:47:07.917966 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.417954121 +0000 UTC m=+151.161930329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.018202 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.018403 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.518363972 +0000 UTC m=+151.262340180 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.018444 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.018898 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.518889366 +0000 UTC m=+151.262865574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.119410 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.119676 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.619661806 +0000 UTC m=+151.363638014 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.221265 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.221628 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.721613706 +0000 UTC m=+151.465589914 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.322607 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.322749 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.822729445 +0000 UTC m=+151.566705653 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.323160 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.323442 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.823434793 +0000 UTC m=+151.567411001 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.380857 4605 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-dpknn container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.380913 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn" podUID="263bef0b-5d5b-40e6-ac60-ff6c4d4217af" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.22:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.385709 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.387333 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" event={"ID":"c910e340-5d0a-4d6d-a570-3c1a5d82231b","Type":"ContainerStarted","Data":"26a1bf64d8330b7de71c8c7f0dc0dd2ed22848843845006170e695cfe2757599"} Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.388083 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"161abad1b1aa5414d34f54fe7a82e057503bd050aa4afd2b4d80bcbfe95ae409"} Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.389136 4605 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6d9476325f6a34937a4667c3025b89a8296573cdc0855c2b1537c765aff268c4"} Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.389998 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c93fcd962ec980740e8eea5d500c9d502a8c52f3791a834d9260c9c57fc32ed0"} Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.424560 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.425707 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:08.925676811 +0000 UTC m=+151.669653019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.450669 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 13:47:08 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld Oct 01 13:47:08 crc kubenswrapper[4605]: [+]process-running ok Oct 01 13:47:08 crc kubenswrapper[4605]: healthz check failed Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.450995 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.525925 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.526226 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.026214385 +0000 UTC m=+151.770190593 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.626584 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.626874 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.126859192 +0000 UTC m=+151.870835400 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.728248 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.728632 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.228618908 +0000 UTC m=+151.972595126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.822194 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z5nlj"] Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.823845 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.826619 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.830514 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.830764 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.330751093 +0000 UTC m=+152.074727301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.846962 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z5nlj"] Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.933879 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.934209 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hzng\" (UniqueName: \"kubernetes.io/projected/370d45c2-c748-4333-bbf5-9f2767f225f5-kube-api-access-4hzng\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.934245 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-utilities\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.934263 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-catalog-content\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:08 crc kubenswrapper[4605]: E1001 13:47:08.934425 4605 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.434408368 +0000 UTC m=+152.178384576 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.948525 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p7h8s"] Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.949708 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.960768 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 13:47:08 crc kubenswrapper[4605]: I1001 13:47:08.968586 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7h8s"] Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.036923 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037129 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-catalog-content\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037171 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hzng\" (UniqueName: \"kubernetes.io/projected/370d45c2-c748-4333-bbf5-9f2767f225f5-kube-api-access-4hzng\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037211 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmqsf\" (UniqueName: \"kubernetes.io/projected/fd03b5be-380c-4bc8-aea1-2467ebe8a390-kube-api-access-xmqsf\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037254 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-utilities\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037277 4605 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-utilities\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037298 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-catalog-content\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.037820 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-catalog-content\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.037905 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.537880518 +0000 UTC m=+152.281856726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.038504 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-utilities\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.044672 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.051918 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.052989 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.056214 4605 patch_prober.go:28] interesting pod/console-f9d7485db-2brwf container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.056259 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2brwf" podUID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 
10.217.0.12:8443: connect: connection refused" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.082424 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2bkjp" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.084265 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hzng\" (UniqueName: \"kubernetes.io/projected/370d45c2-c748-4333-bbf5-9f2767f225f5-kube-api-access-4hzng\") pod \"certified-operators-z5nlj\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.135659 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-n2zhx"] Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.136598 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.138652 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.138817 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-catalog-content\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.138959 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmqsf\" (UniqueName: \"kubernetes.io/projected/fd03b5be-380c-4bc8-aea1-2467ebe8a390-kube-api-access-xmqsf\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.139122 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-utilities\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.141488 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.641473141 +0000 UTC m=+152.385449429 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.142251 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-utilities\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.142261 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-catalog-content\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.150163 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.173476 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n2zhx"] Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.220247 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.220492 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.221785 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.222550 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.231216 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmqsf\" (UniqueName: \"kubernetes.io/projected/fd03b5be-380c-4bc8-aea1-2467ebe8a390-kube-api-access-xmqsf\") pod \"community-operators-p7h8s\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.242576 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.242784 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.742756314 +0000 UTC m=+152.486732522 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.243056 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.243217 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdz8z\" (UniqueName: \"kubernetes.io/projected/69ee908d-c416-425e-acbf-0b212debb8dc-kube-api-access-sdz8z\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.243310 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-utilities\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.243449 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-catalog-content\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.244883 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.744871809 +0000 UTC m=+152.488848017 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.259251 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.259635 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.267043 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.259677 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.267501 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.274627 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.345001 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.345111 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.845081234 +0000 UTC m=+152.589057442 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.345229 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdz8z\" (UniqueName: \"kubernetes.io/projected/69ee908d-c416-425e-acbf-0b212debb8dc-kube-api-access-sdz8z\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.345254 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-utilities\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.345297 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-catalog-content\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.345337 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.345794 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.845787182 +0000 UTC m=+152.589763390 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.346009 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-catalog-content\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.346359 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-utilities\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.350186 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tlgqx"] Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.351032 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.379561 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tlgqx"] Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.384033 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdz8z\" (UniqueName: \"kubernetes.io/projected/69ee908d-c416-425e-acbf-0b212debb8dc-kube-api-access-sdz8z\") pod \"certified-operators-n2zhx\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.391171 4605 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-b84ph container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.391381 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" podUID="e7df6d80-4ed7-4192-b079-bd0119903e10" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.392005 4605 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-b84ph container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.392064 4605 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph" podUID="e7df6d80-4ed7-4192-b079-bd0119903e10" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.448078 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.448295 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt28q\" (UniqueName: \"kubernetes.io/projected/6639ba49-1401-4dc4-be03-f3f7bc43145c-kube-api-access-pt28q\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.448367 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-utilities\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.448408 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-catalog-content\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.448546 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:09.948529194 +0000 UTC m=+152.692505402 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.461125 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:09 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:09 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:09 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.461500 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.464578 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" event={"ID":"c910e340-5d0a-4d6d-a570-3c1a5d82231b","Type":"ContainerStarted","Data":"e77909069b33cafc3b2a9c5fb52a4d42098625884e348108ccfd5c327ddb9e84"}
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.509180 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"bcef12dd609d3d29f51e9150a79b1880d5b0e1215422331fb03fe66aaeb08940"}
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.518629 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n2zhx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.520602 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4f42bbf5c28e90d2e812f08f4475806dc5652c80c77ad2960ff45c47102ad398"}
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.520985 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.552439 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt28q\" (UniqueName: \"kubernetes.io/projected/6639ba49-1401-4dc4-be03-f3f7bc43145c-kube-api-access-pt28q\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.552529 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-utilities\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.552556 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.552576 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-catalog-content\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.553347 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.053332408 +0000 UTC m=+152.797308616 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.555921 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-catalog-content\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.556821 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-utilities\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.585662 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3554ffc4403c49f7eb331ebf1e9d051f1661a1bab6a7e6112ac22c54af0f8268"}
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.593147 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.593770 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.602515 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.602664 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.603455 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pxc25"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.619499 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt28q\" (UniqueName: \"kubernetes.io/projected/6639ba49-1401-4dc4-be03-f3f7bc43145c-kube-api-access-pt28q\") pod \"community-operators-tlgqx\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.621518 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.654238 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.654488 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bad96939-62fd-4f1d-975f-1de2d5a868c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.654544 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bad96939-62fd-4f1d-975f-1de2d5a868c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.655294 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.155273998 +0000 UTC m=+152.899250206 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.702453 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlgqx"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.752007 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-b84ph"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.755629 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.755956 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bad96939-62fd-4f1d-975f-1de2d5a868c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.755960 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.255946976 +0000 UTC m=+152.999923184 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.755992 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bad96939-62fd-4f1d-975f-1de2d5a868c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.756123 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bad96939-62fd-4f1d-975f-1de2d5a868c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.775407 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hq6j4"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.783404 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-dpknn"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.786972 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bad96939-62fd-4f1d-975f-1de2d5a868c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.872462 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.872638 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.372608947 +0000 UTC m=+153.116585155 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.872863 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.874323 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.374311921 +0000 UTC m=+153.118288129 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.963871 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.976404 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:09 crc kubenswrapper[4605]: E1001 13:47:09.976874 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.476853807 +0000 UTC m=+153.220830015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:09 crc kubenswrapper[4605]: I1001 13:47:09.991901 4605 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.078152 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:10 crc kubenswrapper[4605]: E1001 13:47:10.078513 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.57850027 +0000 UTC m=+153.322476478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.110848 4605 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-01T13:47:09.991926798Z","Handler":null,"Name":""}
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.178812 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:10 crc kubenswrapper[4605]: E1001 13:47:10.178967 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.678942661 +0000 UTC m=+153.422918869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.179145 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:10 crc kubenswrapper[4605]: E1001 13:47:10.179476 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.679462125 +0000 UTC m=+153.423438333 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ncbbt" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.279859 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:10 crc kubenswrapper[4605]: E1001 13:47:10.280263 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 13:47:10.780245105 +0000 UTC m=+153.524221313 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.292164 4605 patch_prober.go:28] interesting pod/apiserver-76f77b778f-qmh5z container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]log ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]etcd ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/start-apiserver-admission-initializer ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/generic-apiserver-start-informers ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/max-in-flight-filter ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/storage-object-count-tracker-hook ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Oct 01 13:47:10 crc kubenswrapper[4605]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/project.openshift.io-projectcache ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/openshift.io-startinformers ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/openshift.io-restmapperupdater ok
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Oct 01 13:47:10 crc kubenswrapper[4605]: livez check failed
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.292219 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" podUID="9b2768b9-4054-4f67-a937-2050eebe9c2f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.326972 4605 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.327015 4605 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.382406 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.435473 4605 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.435507 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.442333 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-kdrdj"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.452474 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:10 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:10 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:10 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.452536 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.472553 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.577518 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z5nlj"]
Oct 01 13:47:10 crc kubenswrapper[4605]: W1001 13:47:10.626283 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod370d45c2_c748_4333_bbf5_9f2767f225f5.slice/crio-fe2149bc7fa24528c87436a854f70b26eebc9f9e5760ea88d8079fa386064b41 WatchSource:0}: Error finding container fe2149bc7fa24528c87436a854f70b26eebc9f9e5760ea88d8079fa386064b41: Status 404 returned error can't find the container with id fe2149bc7fa24528c87436a854f70b26eebc9f9e5760ea88d8079fa386064b41
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.636478 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" event={"ID":"c910e340-5d0a-4d6d-a570-3c1a5d82231b","Type":"ContainerStarted","Data":"9bf6a546589e22684dd7de4a067c0c590bd1b427c972e982ac3ca744bd9f128b"}
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.640519 4605 generic.go:334] "Generic (PLEG): container finished" podID="ea3e40db-79b6-4499-b7e6-71bd46c55663" containerID="7541ddd5837ac92c9e1d93dfba42f821bf4ae79da8f3d4c3a1e40a4700fbd9e8" exitCode=0
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.641004 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" event={"ID":"ea3e40db-79b6-4499-b7e6-71bd46c55663","Type":"ContainerDied","Data":"7541ddd5837ac92c9e1d93dfba42f821bf4ae79da8f3d4c3a1e40a4700fbd9e8"}
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.671346 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-5fjl6" podStartSLOduration=13.671330962999999 podStartE2EDuration="13.671330963s" podCreationTimestamp="2025-10-01 13:46:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:10.669556867 +0000 UTC m=+153.413533075" watchObservedRunningTime="2025-10-01 13:47:10.671330963 +0000 UTC m=+153.415307171"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.689143 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ncbbt\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.750702 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.792699 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.802010 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7h8s"]
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.847587 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.872902 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.879344 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-n2zhx"]
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.904260 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tlgqx"]
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.932507 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xnpnq"]
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.933523 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.938218 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 01 13:47:10 crc kubenswrapper[4605]: I1001 13:47:10.951039 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xnpnq"]
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.005709 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqn7v\" (UniqueName: \"kubernetes.io/projected/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-kube-api-access-hqn7v\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.005763 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-utilities\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.005804 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-catalog-content\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.109662 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqn7v\" (UniqueName: \"kubernetes.io/projected/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-kube-api-access-hqn7v\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.109726 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-utilities\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.109772 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-catalog-content\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.110480 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-catalog-content\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.110498 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-utilities\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.157361 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqn7v\" (UniqueName: \"kubernetes.io/projected/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-kube-api-access-hqn7v\") pod \"redhat-marketplace-xnpnq\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.284955 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xnpnq"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.342136 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z9jm5"]
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.354809 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.373893 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9jm5"]
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.415665 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-utilities\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.415802 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7rp6\" (UniqueName: \"kubernetes.io/projected/ad7c6809-d039-42e8-a1d9-3872a3751ad4-kube-api-access-b7rp6\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.415830 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-catalog-content\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.448061 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:11 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:11 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:11 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.448666 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.518404 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-catalog-content\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.518450 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-utilities\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.518545 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7rp6\" (UniqueName: \"kubernetes.io/projected/ad7c6809-d039-42e8-a1d9-3872a3751ad4-kube-api-access-b7rp6\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.519272 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-catalog-content\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.519505 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-utilities\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.565434 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7rp6\" (UniqueName: \"kubernetes.io/projected/ad7c6809-d039-42e8-a1d9-3872a3751ad4-kube-api-access-b7rp6\") pod \"redhat-marketplace-z9jm5\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") " pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.652699 4605 generic.go:334] "Generic (PLEG): container finished" podID="69ee908d-c416-425e-acbf-0b212debb8dc" containerID="099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798" exitCode=0
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.652794 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n2zhx" event={"ID":"69ee908d-c416-425e-acbf-0b212debb8dc","Type":"ContainerDied","Data":"099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.652834 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n2zhx" event={"ID":"69ee908d-c416-425e-acbf-0b212debb8dc","Type":"ContainerStarted","Data":"ed7b6778382991ba795a0e33efcd22804bbac51561afe311d2069f61e6f6030a"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.654727 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.657006 4605 generic.go:334] "Generic (PLEG): container finished" podID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerID="40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10" exitCode=0
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.657064 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5nlj" event={"ID":"370d45c2-c748-4333-bbf5-9f2767f225f5","Type":"ContainerDied","Data":"40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.657085 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5nlj" event={"ID":"370d45c2-c748-4333-bbf5-9f2767f225f5","Type":"ContainerStarted","Data":"fe2149bc7fa24528c87436a854f70b26eebc9f9e5760ea88d8079fa386064b41"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.665842 4605 generic.go:334] "Generic (PLEG): container finished" podID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerID="db604bd7592f84855d12f40d6cb86d2103956b2a152d9c27f9a60335086f9a55" exitCode=0
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.665910 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlgqx" event={"ID":"6639ba49-1401-4dc4-be03-f3f7bc43145c","Type":"ContainerDied","Data":"db604bd7592f84855d12f40d6cb86d2103956b2a152d9c27f9a60335086f9a55"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.665935 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlgqx" event={"ID":"6639ba49-1401-4dc4-be03-f3f7bc43145c","Type":"ContainerStarted","Data":"dd2871f5f0e4486f3ad5aff9487a158a612405fff63628f5037732779f0d5be7"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.671491 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerID="ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc" exitCode=0
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.671553 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7h8s" event={"ID":"fd03b5be-380c-4bc8-aea1-2467ebe8a390","Type":"ContainerDied","Data":"ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.671584 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7h8s" event={"ID":"fd03b5be-380c-4bc8-aea1-2467ebe8a390","Type":"ContainerStarted","Data":"d7ee2a35530ef79df850dfe17e834aafaa957ec139c177604d3df761ea318428"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.684381 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.685057 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bad96939-62fd-4f1d-975f-1de2d5a868c4","Type":"ContainerStarted","Data":"57194d14fe6fe47338cf2ab933913209355b66d3bd3229b95cfa41bd1ba91e9a"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.685086 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bad96939-62fd-4f1d-975f-1de2d5a868c4","Type":"ContainerStarted","Data":"0bd8d09471cd988d88955bad2f1f1e4de8961f189ec3a6e905b4f3b8229e549f"}
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.700697 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ncbbt"]
Oct 01 13:47:11 crc kubenswrapper[4605]: W1001 13:47:11.726070 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20fe9925_9f6b_4b69_a13d_e8ff88daaec6.slice/crio-b2cf20413417acb64c83ee50e4dcb9325ca820cae7e2b5bcb9c30fb3bfefad6e WatchSource:0}: Error finding container b2cf20413417acb64c83ee50e4dcb9325ca820cae7e2b5bcb9c30fb3bfefad6e: Status 404 returned error can't find the container with id b2cf20413417acb64c83ee50e4dcb9325ca820cae7e2b5bcb9c30fb3bfefad6e
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.741676 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.741661955 podStartE2EDuration="2.741661955s" podCreationTimestamp="2025-10-01 13:47:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:11.741649695 +0000 UTC m=+154.485625903" watchObservedRunningTime="2025-10-01 13:47:11.741661955 +0000 UTC m=+154.485638163"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.794148 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xnpnq"]
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.935714 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.944224 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pcw9k"]
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.959848 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.962946 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Oct 01 13:47:11 crc kubenswrapper[4605]: I1001 13:47:11.968041 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcw9k"]
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.030721 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-catalog-content\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.031044 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-utilities\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.031066 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qfhb\" (UniqueName: \"kubernetes.io/projected/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-kube-api-access-6qfhb\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.102981 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.132758 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-catalog-content\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.132830 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-utilities\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.132850 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qfhb\" (UniqueName: \"kubernetes.io/projected/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-kube-api-access-6qfhb\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.133416 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9jm5"]
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.133555 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-catalog-content\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.133944 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-utilities\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.171676 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qfhb\" (UniqueName: \"kubernetes.io/projected/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-kube-api-access-6qfhb\") pod \"redhat-operators-pcw9k\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.233500 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea3e40db-79b6-4499-b7e6-71bd46c55663-config-volume\") pod \"ea3e40db-79b6-4499-b7e6-71bd46c55663\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") "
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.233576 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea3e40db-79b6-4499-b7e6-71bd46c55663-secret-volume\") pod \"ea3e40db-79b6-4499-b7e6-71bd46c55663\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") "
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.233640 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m9jl\" (UniqueName: \"kubernetes.io/projected/ea3e40db-79b6-4499-b7e6-71bd46c55663-kube-api-access-2m9jl\") pod \"ea3e40db-79b6-4499-b7e6-71bd46c55663\" (UID: \"ea3e40db-79b6-4499-b7e6-71bd46c55663\") "
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.234639 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea3e40db-79b6-4499-b7e6-71bd46c55663-config-volume" (OuterVolumeSpecName: "config-volume") pod "ea3e40db-79b6-4499-b7e6-71bd46c55663" (UID: "ea3e40db-79b6-4499-b7e6-71bd46c55663"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.244714 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea3e40db-79b6-4499-b7e6-71bd46c55663-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ea3e40db-79b6-4499-b7e6-71bd46c55663" (UID: "ea3e40db-79b6-4499-b7e6-71bd46c55663"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.247766 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea3e40db-79b6-4499-b7e6-71bd46c55663-kube-api-access-2m9jl" (OuterVolumeSpecName: "kube-api-access-2m9jl") pod "ea3e40db-79b6-4499-b7e6-71bd46c55663" (UID: "ea3e40db-79b6-4499-b7e6-71bd46c55663"). InnerVolumeSpecName "kube-api-access-2m9jl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.279601 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcw9k"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.335524 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m9jl\" (UniqueName: \"kubernetes.io/projected/ea3e40db-79b6-4499-b7e6-71bd46c55663-kube-api-access-2m9jl\") on node \"crc\" DevicePath \"\""
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.335558 4605 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea3e40db-79b6-4499-b7e6-71bd46c55663-config-volume\") on node \"crc\" DevicePath \"\""
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.335567 4605 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea3e40db-79b6-4499-b7e6-71bd46c55663-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.337050 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mh782"]
Oct 01 13:47:12 crc kubenswrapper[4605]: E1001 13:47:12.337326 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea3e40db-79b6-4499-b7e6-71bd46c55663" containerName="collect-profiles"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.337348 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea3e40db-79b6-4499-b7e6-71bd46c55663" containerName="collect-profiles"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.337476 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea3e40db-79b6-4499-b7e6-71bd46c55663" containerName="collect-profiles"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.338405 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.351165 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mh782"]
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.436770 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-catalog-content\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.436872 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzs6p\" (UniqueName: \"kubernetes.io/projected/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-kube-api-access-nzs6p\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.436940 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-utilities\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.450410 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:12 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:12 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:12 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.450457 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.540013 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-catalog-content\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.541144 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-catalog-content\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.541823 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzs6p\" (UniqueName: \"kubernetes.io/projected/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-kube-api-access-nzs6p\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.542138 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-utilities\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.542427 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-utilities\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.566197 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcw9k"]
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.589883 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzs6p\" (UniqueName: \"kubernetes.io/projected/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-kube-api-access-nzs6p\") pod \"redhat-operators-mh782\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") " pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: W1001 13:47:12.614276 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod877e04cf_b58e_470f_adc7_8f7fdb0ccddb.slice/crio-09cc7399d65e74604846511f1e51791d8f006c44d91382361bda0852951b1376 WatchSource:0}: Error finding container 09cc7399d65e74604846511f1e51791d8f006c44d91382361bda0852951b1376: Status 404 returned error can't find the container with id 09cc7399d65e74604846511f1e51791d8f006c44d91382361bda0852951b1376
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.658224 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.695454 4605 generic.go:334] "Generic (PLEG): container finished" podID="bad96939-62fd-4f1d-975f-1de2d5a868c4" containerID="57194d14fe6fe47338cf2ab933913209355b66d3bd3229b95cfa41bd1ba91e9a" exitCode=0
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.695648 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bad96939-62fd-4f1d-975f-1de2d5a868c4","Type":"ContainerDied","Data":"57194d14fe6fe47338cf2ab933913209355b66d3bd3229b95cfa41bd1ba91e9a"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.704634 4605 generic.go:334] "Generic (PLEG): container finished" podID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerID="491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa" exitCode=0
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.704694 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xnpnq" event={"ID":"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4","Type":"ContainerDied","Data":"491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.704718 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xnpnq" event={"ID":"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4","Type":"ContainerStarted","Data":"a1ae50b660c1539bd2cea95d8e3326e87ebbf043ce04dfa25b021164d6987def"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.708175 4605 generic.go:334] "Generic (PLEG): container finished" podID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerID="6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0" exitCode=0
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.708238 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9jm5" event={"ID":"ad7c6809-d039-42e8-a1d9-3872a3751ad4","Type":"ContainerDied","Data":"6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.708264 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9jm5" event={"ID":"ad7c6809-d039-42e8-a1d9-3872a3751ad4","Type":"ContainerStarted","Data":"aec471d7ee3cc4d42d079311f91894b202bfd3c2c7fc56949baf97599c1768b2"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.715252 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr" event={"ID":"ea3e40db-79b6-4499-b7e6-71bd46c55663","Type":"ContainerDied","Data":"edafa7bbe010f55576837f8b98c55515e20cd51478079aa696d8b2b3b68a968d"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.715315 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edafa7bbe010f55576837f8b98c55515e20cd51478079aa696d8b2b3b68a968d"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.715425 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.736215 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" event={"ID":"20fe9925-9f6b-4b69-a13d-e8ff88daaec6","Type":"ContainerStarted","Data":"7c9529b403cdb1fb229b4da78c3369686e8b204613ebe3ce0f79128c6b740297"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.736260 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" event={"ID":"20fe9925-9f6b-4b69-a13d-e8ff88daaec6","Type":"ContainerStarted","Data":"b2cf20413417acb64c83ee50e4dcb9325ca820cae7e2b5bcb9c30fb3bfefad6e"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.736880 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt"
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.745204 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcw9k" event={"ID":"877e04cf-b58e-470f-adc7-8f7fdb0ccddb","Type":"ContainerStarted","Data":"09cc7399d65e74604846511f1e51791d8f006c44d91382361bda0852951b1376"}
Oct 01 13:47:12 crc kubenswrapper[4605]: I1001 13:47:12.793643 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" podStartSLOduration=133.793625801 podStartE2EDuration="2m13.793625801s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:12.792701477 +0000 UTC m=+155.536677685" watchObservedRunningTime="2025-10-01 13:47:12.793625801 +0000 UTC m=+155.537602009"
Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.216646 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mh782"]
Oct 01 13:47:13 crc kubenswrapper[4605]: W1001 13:47:13.261692 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabf8ee39_bbbb_4d2a_9503_ed6b2ecbf051.slice/crio-a260fa80b6a438ff74a6c9e163c0c41b8314a474e5c42aa1495e7ddd14358b4a WatchSource:0}: Error finding container a260fa80b6a438ff74a6c9e163c0c41b8314a474e5c42aa1495e7ddd14358b4a: Status 404 returned error can't find the container with id a260fa80b6a438ff74a6c9e163c0c41b8314a474e5c42aa1495e7ddd14358b4a
Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.445368 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 01 13:47:13 crc kubenswrapper[4605]: [-]has-synced failed: reason withheld
Oct 01 13:47:13 crc kubenswrapper[4605]: [+]process-running ok
Oct 01 13:47:13 crc kubenswrapper[4605]: healthz check failed
Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.445448 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.688634 4605 kubelet.go:2421] "SyncLoop ADD" source="api"
pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.689549 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.691485 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.693777 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.693935 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.765786 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/23fa8712-77a3-4af4-95cb-827936ad280a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.765882 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/23fa8712-77a3-4af4-95cb-827936ad280a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.813814 4605 generic.go:334] "Generic (PLEG): container finished" podID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerID="71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408" exitCode=0 Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.814169 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcw9k" event={"ID":"877e04cf-b58e-470f-adc7-8f7fdb0ccddb","Type":"ContainerDied","Data":"71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408"} Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.828122 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerStarted","Data":"32ccfab0b0c952cfbdae39884ed3466fd736d6c5c9d248552d1a1eff0848cdd7"} Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.828159 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerStarted","Data":"a260fa80b6a438ff74a6c9e163c0c41b8314a474e5c42aa1495e7ddd14358b4a"} Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.866688 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/23fa8712-77a3-4af4-95cb-827936ad280a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.866768 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/23fa8712-77a3-4af4-95cb-827936ad280a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.867430 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/23fa8712-77a3-4af4-95cb-827936ad280a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:13 crc kubenswrapper[4605]: I1001 13:47:13.919491 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/23fa8712-77a3-4af4-95cb-827936ad280a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.046140 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.164978 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.206063 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.210614 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qmh5z" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.274462 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bad96939-62fd-4f1d-975f-1de2d5a868c4-kube-api-access\") pod \"bad96939-62fd-4f1d-975f-1de2d5a868c4\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.274586 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bad96939-62fd-4f1d-975f-1de2d5a868c4-kubelet-dir\") pod \"bad96939-62fd-4f1d-975f-1de2d5a868c4\" (UID: \"bad96939-62fd-4f1d-975f-1de2d5a868c4\") " Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.276208 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bad96939-62fd-4f1d-975f-1de2d5a868c4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bad96939-62fd-4f1d-975f-1de2d5a868c4" (UID: "bad96939-62fd-4f1d-975f-1de2d5a868c4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.279764 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bad96939-62fd-4f1d-975f-1de2d5a868c4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bad96939-62fd-4f1d-975f-1de2d5a868c4" (UID: "bad96939-62fd-4f1d-975f-1de2d5a868c4"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.376766 4605 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bad96939-62fd-4f1d-975f-1de2d5a868c4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.377132 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bad96939-62fd-4f1d-975f-1de2d5a868c4-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.449480 4605 patch_prober.go:28] interesting pod/router-default-5444994796-kdrdj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 13:47:14 crc kubenswrapper[4605]: [+]has-synced ok Oct 01 13:47:14 crc kubenswrapper[4605]: [+]process-running ok Oct 01 13:47:14 crc kubenswrapper[4605]: healthz check failed Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.449539 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kdrdj" podUID="339fa1de-0cd5-4503-89c9-ff0ca9b0f8a9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.555413 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 13:47:14 crc kubenswrapper[4605]: W1001 13:47:14.691242 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod23fa8712_77a3_4af4_95cb_827936ad280a.slice/crio-0eb5365444f41b4e16a1bd207cf213746b4d0738222e986a46ad4281900a5b26 WatchSource:0}: Error finding container 0eb5365444f41b4e16a1bd207cf213746b4d0738222e986a46ad4281900a5b26: Status 404 returned error can't find the container with id 0eb5365444f41b4e16a1bd207cf213746b4d0738222e986a46ad4281900a5b26 Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.873041 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.873280 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"bad96939-62fd-4f1d-975f-1de2d5a868c4","Type":"ContainerDied","Data":"0bd8d09471cd988d88955bad2f1f1e4de8961f189ec3a6e905b4f3b8229e549f"} Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.873333 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bd8d09471cd988d88955bad2f1f1e4de8961f189ec3a6e905b4f3b8229e549f" Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.889218 4605 generic.go:334] "Generic (PLEG): container finished" podID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerID="32ccfab0b0c952cfbdae39884ed3466fd736d6c5c9d248552d1a1eff0848cdd7" exitCode=0 Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.889548 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerDied","Data":"32ccfab0b0c952cfbdae39884ed3466fd736d6c5c9d248552d1a1eff0848cdd7"} Oct 01 13:47:14 crc kubenswrapper[4605]: I1001 13:47:14.915433 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"23fa8712-77a3-4af4-95cb-827936ad280a","Type":"ContainerStarted","Data":"0eb5365444f41b4e16a1bd207cf213746b4d0738222e986a46ad4281900a5b26"} Oct 01 13:47:15 crc kubenswrapper[4605]: I1001 13:47:15.226112 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-9h8zv" Oct 01 13:47:15 crc kubenswrapper[4605]: I1001 13:47:15.446474 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:47:15 crc kubenswrapper[4605]: I1001 13:47:15.448596 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-kdrdj" Oct 01 13:47:16 crc kubenswrapper[4605]: I1001 13:47:16.044736 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"23fa8712-77a3-4af4-95cb-827936ad280a","Type":"ContainerStarted","Data":"c24b09c5549b32be08fa9b76b03fcb40d23ab348c6a5378a014ca7bd87dbf32d"} Oct 01 13:47:16 crc kubenswrapper[4605]: I1001 13:47:16.064909 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.064892397 podStartE2EDuration="3.064892397s" podCreationTimestamp="2025-10-01 13:47:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:47:16.061564771 +0000 UTC m=+158.805540999" watchObservedRunningTime="2025-10-01 13:47:16.064892397 +0000 UTC m=+158.808868605" Oct 01 13:47:17 crc kubenswrapper[4605]: I1001 13:47:17.091387 4605 generic.go:334] "Generic (PLEG): container finished" podID="23fa8712-77a3-4af4-95cb-827936ad280a" containerID="c24b09c5549b32be08fa9b76b03fcb40d23ab348c6a5378a014ca7bd87dbf32d" exitCode=0 Oct 01 13:47:17 crc kubenswrapper[4605]: I1001 13:47:17.091562 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"23fa8712-77a3-4af4-95cb-827936ad280a","Type":"ContainerDied","Data":"c24b09c5549b32be08fa9b76b03fcb40d23ab348c6a5378a014ca7bd87dbf32d"} Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.487958 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.507063 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/23fa8712-77a3-4af4-95cb-827936ad280a-kubelet-dir\") pod \"23fa8712-77a3-4af4-95cb-827936ad280a\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.507165 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/23fa8712-77a3-4af4-95cb-827936ad280a-kube-api-access\") pod \"23fa8712-77a3-4af4-95cb-827936ad280a\" (UID: \"23fa8712-77a3-4af4-95cb-827936ad280a\") " Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.509862 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23fa8712-77a3-4af4-95cb-827936ad280a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "23fa8712-77a3-4af4-95cb-827936ad280a" (UID: "23fa8712-77a3-4af4-95cb-827936ad280a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.532008 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23fa8712-77a3-4af4-95cb-827936ad280a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "23fa8712-77a3-4af4-95cb-827936ad280a" (UID: "23fa8712-77a3-4af4-95cb-827936ad280a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.608302 4605 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/23fa8712-77a3-4af4-95cb-827936ad280a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 13:47:18 crc kubenswrapper[4605]: I1001 13:47:18.608330 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/23fa8712-77a3-4af4-95cb-827936ad280a-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.058233 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.063526 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.137306 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.137366 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"23fa8712-77a3-4af4-95cb-827936ad280a","Type":"ContainerDied","Data":"0eb5365444f41b4e16a1bd207cf213746b4d0738222e986a46ad4281900a5b26"} Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.137426 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eb5365444f41b4e16a1bd207cf213746b4d0738222e986a46ad4281900a5b26" Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.257291 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.257383 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.257518 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:19 crc kubenswrapper[4605]: I1001 13:47:19.257560 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:21 crc kubenswrapper[4605]: I1001 13:47:21.631126 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:47:21 crc kubenswrapper[4605]: I1001 13:47:21.631517 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:47:21 crc kubenswrapper[4605]: I1001 13:47:21.686534 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:47:21 crc kubenswrapper[4605]: I1001 13:47:21.692716 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8c172ce5-f64e-417d-9fc7-e06c5e443fbc-metrics-certs\") pod \"network-metrics-daemon-m7ph7\" (UID: \"8c172ce5-f64e-417d-9fc7-e06c5e443fbc\") " pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:47:21 crc 
kubenswrapper[4605]: I1001 13:47:21.939463 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-m7ph7" Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.263311 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.263683 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.263341 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.264079 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.264119 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-48sxr" Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.264685 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.264710 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.264681 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"f1516e23a93bead988e4da895be70e1b0460974bd0a1de685428cce218a143f4"} pod="openshift-console/downloads-7954f5f757-48sxr" containerMessage="Container download-server failed liveness probe, will be restarted" Oct 01 13:47:29 crc kubenswrapper[4605]: I1001 13:47:29.264759 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" containerID="cri-o://f1516e23a93bead988e4da895be70e1b0460974bd0a1de685428cce218a143f4" gracePeriod=2 Oct 01 13:47:30 crc kubenswrapper[4605]: I1001 13:47:30.252114 4605 generic.go:334] "Generic (PLEG): container finished" podID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerID="f1516e23a93bead988e4da895be70e1b0460974bd0a1de685428cce218a143f4" exitCode=0 Oct 01 13:47:30 crc kubenswrapper[4605]: I1001 13:47:30.252205 4605 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-48sxr" event={"ID":"0c93dc9b-fba0-4d28-b8b3-8def5f66d466","Type":"ContainerDied","Data":"f1516e23a93bead988e4da895be70e1b0460974bd0a1de685428cce218a143f4"} Oct 01 13:47:30 crc kubenswrapper[4605]: I1001 13:47:30.854057 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:47:39 crc kubenswrapper[4605]: I1001 13:47:39.256943 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:39 crc kubenswrapper[4605]: I1001 13:47:39.258245 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:39 crc kubenswrapper[4605]: I1001 13:47:39.811405 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jxwcz" Oct 01 13:47:46 crc kubenswrapper[4605]: I1001 13:47:46.173143 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 13:47:49 crc kubenswrapper[4605]: I1001 13:47:49.256562 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:49 crc kubenswrapper[4605]: I1001 13:47:49.256628 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:51 crc kubenswrapper[4605]: I1001 13:47:51.631477 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:47:51 crc kubenswrapper[4605]: I1001 13:47:51.631777 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:47:58 crc kubenswrapper[4605]: E1001 13:47:58.890846 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 01 13:47:58 crc kubenswrapper[4605]: E1001 13:47:58.891725 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nzs6p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mh782_openshift-marketplace(abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:47:58 crc kubenswrapper[4605]: E1001 13:47:58.892876 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mh782" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" Oct 01 13:47:59 crc kubenswrapper[4605]: I1001 13:47:59.256680 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 01 13:47:59 crc kubenswrapper[4605]: I1001 13:47:59.256748 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 01 13:47:59 crc kubenswrapper[4605]: E1001 13:47:59.736623 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mh782" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" Oct 01 13:47:59 crc kubenswrapper[4605]: E1001 13:47:59.806446 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 13:47:59 crc kubenswrapper[4605]: E1001 13:47:59.806860 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hqn7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-xnpnq_openshift-marketplace(6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:47:59 crc kubenswrapper[4605]: E1001 13:47:59.808066 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-xnpnq" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" Oct 01 13:48:01 crc kubenswrapper[4605]: E1001 13:48:01.165948 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-xnpnq" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" Oct 01 13:48:01 crc kubenswrapper[4605]: E1001 13:48:01.243731 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 01 13:48:01 crc kubenswrapper[4605]: E1001 13:48:01.243942 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xmqsf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-p7h8s_openshift-marketplace(fd03b5be-380c-4bc8-aea1-2467ebe8a390): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:48:01 crc kubenswrapper[4605]: E1001 13:48:01.245343 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-p7h8s" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.507308 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-p7h8s" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.619462 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.619632 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b7rp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-z9jm5_openshift-marketplace(ad7c6809-d039-42e8-a1d9-3872a3751ad4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.620731 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-z9jm5" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.662798 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.663079 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4hzng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-z5nlj_openshift-marketplace(370d45c2-c748-4333-bbf5-9f2767f225f5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.664432 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-z5nlj" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.665544 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.665667 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pt28q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-tlgqx_openshift-marketplace(6639ba49-1401-4dc4-be03-f3f7bc43145c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.666968 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-tlgqx" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.705005 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.705145 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sdz8z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-n2zhx_openshift-marketplace(69ee908d-c416-425e-acbf-0b212debb8dc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.706381 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-n2zhx" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.718146 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.718277 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6qfhb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-pcw9k_openshift-marketplace(877e04cf-b58e-470f-adc7-8f7fdb0ccddb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 01 13:48:03 crc kubenswrapper[4605]: E1001 13:48:03.719613 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-pcw9k" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb"
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.034999 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-m7ph7"]
Oct 01 13:48:04 crc kubenswrapper[4605]: W1001 13:48:04.048717 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c172ce5_f64e_417d_9fc7_e06c5e443fbc.slice/crio-d3372e3a9b53a11c13ae55e3b2095498b4be46f42e9e92304ea31183b7e946c8 WatchSource:0}: Error finding container d3372e3a9b53a11c13ae55e3b2095498b4be46f42e9e92304ea31183b7e946c8: Status 404 returned error can't find the container with id d3372e3a9b53a11c13ae55e3b2095498b4be46f42e9e92304ea31183b7e946c8
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.444896 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" event={"ID":"8c172ce5-f64e-417d-9fc7-e06c5e443fbc","Type":"ContainerStarted","Data":"043c8ec94169510f98a8a791f4eb500ef7c639475e08d836a6c2315437b1d342"}
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.445257 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" event={"ID":"8c172ce5-f64e-417d-9fc7-e06c5e443fbc","Type":"ContainerStarted","Data":"d3372e3a9b53a11c13ae55e3b2095498b4be46f42e9e92304ea31183b7e946c8"}
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.447260 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-48sxr" event={"ID":"0c93dc9b-fba0-4d28-b8b3-8def5f66d466","Type":"ContainerStarted","Data":"9ec4b83fb2d40b113150aaabad47ff7d68c998b53bd2938b79bdc71ec842e18b"}
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.447645 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-48sxr"
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.448038 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Oct 01 13:48:04 crc kubenswrapper[4605]: I1001 13:48:04.448126 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Oct 01 13:48:04 crc kubenswrapper[4605]: E1001 13:48:04.449111 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-pcw9k" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb"
Oct 01 13:48:04 crc kubenswrapper[4605]: E1001 13:48:04.449211 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-z9jm5" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4"
Oct 01 13:48:04 crc kubenswrapper[4605]: E1001 13:48:04.449364 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-n2zhx" podUID="69ee908d-c416-425e-acbf-0b212debb8dc"
Oct 01 13:48:04 crc kubenswrapper[4605]: E1001 13:48:04.449794 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-z5nlj" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5"
Oct 01 13:48:04 crc kubenswrapper[4605]: E1001 13:48:04.449844 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-tlgqx" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c"
Oct 01 13:48:05 crc kubenswrapper[4605]: I1001 13:48:05.453616 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-m7ph7" event={"ID":"8c172ce5-f64e-417d-9fc7-e06c5e443fbc","Type":"ContainerStarted","Data":"92e865881a5f8ea914a8463a6fb4972f9140dfdd9f7f3a0d665caaa0b9fe5189"}
Oct 01 13:48:05 crc kubenswrapper[4605]: I1001 13:48:05.455243 4605 patch_prober.go:28] interesting pod/downloads-7954f5f757-48sxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Oct 01 13:48:05 crc kubenswrapper[4605]: I1001 13:48:05.455313 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-48sxr" podUID="0c93dc9b-fba0-4d28-b8b3-8def5f66d466" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Oct 01 13:48:06 crc kubenswrapper[4605]: I1001 13:48:06.476187 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-m7ph7" podStartSLOduration=187.47617295 podStartE2EDuration="3m7.47617295s" podCreationTimestamp="2025-10-01 13:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:48:06.473365057 +0000 UTC m=+209.217341265" watchObservedRunningTime="2025-10-01 13:48:06.47617295 +0000 UTC m=+209.220149158"
Oct 01 13:48:09 crc kubenswrapper[4605]: I1001 13:48:09.262910 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-48sxr"
Oct 01 13:48:21 crc kubenswrapper[4605]: I1001 13:48:21.631398 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 13:48:21 crc kubenswrapper[4605]: I1001 13:48:21.632005 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 13:48:21 crc kubenswrapper[4605]: I1001 13:48:21.632055 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7"
Oct 01 13:48:21 crc kubenswrapper[4605]: I1001 13:48:21.632701 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 13:48:21 crc kubenswrapper[4605]: I1001 13:48:21.632749 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178" gracePeriod=600
Oct 01 13:48:22 crc kubenswrapper[4605]: I1001 13:48:22.540548 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerStarted","Data":"1ac5a1b872b977fb888983ee44edc2b9edc7312388dce47ee1c638188be21d24"}
Oct 01 13:48:22 crc kubenswrapper[4605]: I1001 13:48:22.543077 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178" exitCode=0
containerID="ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178" exitCode=0 Oct 01 13:48:22 crc kubenswrapper[4605]: I1001 13:48:22.543155 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178"} Oct 01 13:48:23 crc kubenswrapper[4605]: I1001 13:48:23.548730 4605 generic.go:334] "Generic (PLEG): container finished" podID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerID="1ac5a1b872b977fb888983ee44edc2b9edc7312388dce47ee1c638188be21d24" exitCode=0 Oct 01 13:48:23 crc kubenswrapper[4605]: I1001 13:48:23.548778 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerDied","Data":"1ac5a1b872b977fb888983ee44edc2b9edc7312388dce47ee1c638188be21d24"} Oct 01 13:48:24 crc kubenswrapper[4605]: I1001 13:48:24.557055 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"79801dc6bc063ddb6a797ea140823ed637f359b821d68c718f59b852dd6781bd"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.566872 4605 generic.go:334] "Generic (PLEG): container finished" podID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerID="3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe" exitCode=0 Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.566967 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcw9k" event={"ID":"877e04cf-b58e-470f-adc7-8f7fdb0ccddb","Type":"ContainerDied","Data":"3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.570004 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerStarted","Data":"02fda48d590d7851f332cd531877c860eed61c285a4ed45ee010ef4d68c04274"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.573526 4605 generic.go:334] "Generic (PLEG): container finished" podID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerID="a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186" exitCode=0 Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.573604 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9jm5" event={"ID":"ad7c6809-d039-42e8-a1d9-3872a3751ad4","Type":"ContainerDied","Data":"a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.591663 4605 generic.go:334] "Generic (PLEG): container finished" podID="69ee908d-c416-425e-acbf-0b212debb8dc" containerID="370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e" exitCode=0 Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.591753 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n2zhx" event={"ID":"69ee908d-c416-425e-acbf-0b212debb8dc","Type":"ContainerDied","Data":"370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.597050 4605 generic.go:334] "Generic (PLEG): container finished" podID="370d45c2-c748-4333-bbf5-9f2767f225f5" 
containerID="133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d" exitCode=0 Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.597130 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5nlj" event={"ID":"370d45c2-c748-4333-bbf5-9f2767f225f5","Type":"ContainerDied","Data":"133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.605531 4605 generic.go:334] "Generic (PLEG): container finished" podID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerID="049d3f2796eb2b0a4c356275b4ab83e7d73cd235f1410fb2caf2f4093728afee" exitCode=0 Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.605579 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlgqx" event={"ID":"6639ba49-1401-4dc4-be03-f3f7bc43145c","Type":"ContainerDied","Data":"049d3f2796eb2b0a4c356275b4ab83e7d73cd235f1410fb2caf2f4093728afee"} Oct 01 13:48:26 crc kubenswrapper[4605]: I1001 13:48:26.623549 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mh782" podStartSLOduration=3.803453674 podStartE2EDuration="1m14.623526875s" podCreationTimestamp="2025-10-01 13:47:12 +0000 UTC" firstStartedPulling="2025-10-01 13:47:14.904473311 +0000 UTC m=+157.648449519" lastFinishedPulling="2025-10-01 13:48:25.724546512 +0000 UTC m=+228.468522720" observedRunningTime="2025-10-01 13:48:26.618827643 +0000 UTC m=+229.362803871" watchObservedRunningTime="2025-10-01 13:48:26.623526875 +0000 UTC m=+229.367503083" Oct 01 13:48:27 crc kubenswrapper[4605]: I1001 13:48:27.617987 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerID="3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab" exitCode=0 Oct 01 13:48:27 crc kubenswrapper[4605]: I1001 13:48:27.618551 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7h8s" event={"ID":"fd03b5be-380c-4bc8-aea1-2467ebe8a390","Type":"ContainerDied","Data":"3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab"} Oct 01 13:48:27 crc kubenswrapper[4605]: I1001 13:48:27.629063 4605 generic.go:334] "Generic (PLEG): container finished" podID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerID="5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1" exitCode=0 Oct 01 13:48:27 crc kubenswrapper[4605]: I1001 13:48:27.629611 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xnpnq" event={"ID":"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4","Type":"ContainerDied","Data":"5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1"} Oct 01 13:48:31 crc kubenswrapper[4605]: I1001 13:48:31.651563 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5nlj" event={"ID":"370d45c2-c748-4333-bbf5-9f2767f225f5","Type":"ContainerStarted","Data":"1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3"} Oct 01 13:48:32 crc kubenswrapper[4605]: I1001 13:48:32.659725 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mh782" Oct 01 13:48:32 crc kubenswrapper[4605]: I1001 13:48:32.660632 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mh782" Oct 01 13:48:32 crc kubenswrapper[4605]: I1001 13:48:32.683690 4605 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z5nlj" podStartSLOduration=6.2635120109999995 podStartE2EDuration="1m24.683666712s" podCreationTimestamp="2025-10-01 13:47:08 +0000 UTC" firstStartedPulling="2025-10-01 13:47:11.659877787 +0000 UTC m=+154.403853995" lastFinishedPulling="2025-10-01 13:48:30.080032498 +0000 UTC m=+232.824008696" observedRunningTime="2025-10-01 13:48:32.681772903 +0000 UTC m=+235.425749111" watchObservedRunningTime="2025-10-01 13:48:32.683666712 +0000 UTC m=+235.427642950" Oct 01 13:48:33 crc kubenswrapper[4605]: I1001 13:48:33.149816 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mh782" Oct 01 13:48:33 crc kubenswrapper[4605]: I1001 13:48:33.698703 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mh782" Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.667372 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9jm5" event={"ID":"ad7c6809-d039-42e8-a1d9-3872a3751ad4","Type":"ContainerStarted","Data":"749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255"} Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.669490 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n2zhx" event={"ID":"69ee908d-c416-425e-acbf-0b212debb8dc","Type":"ContainerStarted","Data":"c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748"} Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.672894 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlgqx" event={"ID":"6639ba49-1401-4dc4-be03-f3f7bc43145c","Type":"ContainerStarted","Data":"8b914bd61cf55e56b86fa03ef9f9dd072d858c47c4d726630ed8a4bb4d7c810b"} Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.674634 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7h8s" event={"ID":"fd03b5be-380c-4bc8-aea1-2467ebe8a390","Type":"ContainerStarted","Data":"1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2"} Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.676123 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcw9k" event={"ID":"877e04cf-b58e-470f-adc7-8f7fdb0ccddb","Type":"ContainerStarted","Data":"b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f"} Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.728904 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-z9jm5" podStartSLOduration=2.619259065 podStartE2EDuration="1m23.728880033s" podCreationTimestamp="2025-10-01 13:47:11 +0000 UTC" firstStartedPulling="2025-10-01 13:47:12.780727757 +0000 UTC m=+155.524703965" lastFinishedPulling="2025-10-01 13:48:33.890348735 +0000 UTC m=+236.634324933" observedRunningTime="2025-10-01 13:48:34.692317346 +0000 UTC m=+237.436293564" watchObservedRunningTime="2025-10-01 13:48:34.728880033 +0000 UTC m=+237.472856231" Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.733938 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p7h8s" podStartSLOduration=4.5893455 podStartE2EDuration="1m26.733915914s" podCreationTimestamp="2025-10-01 13:47:08 +0000 UTC" firstStartedPulling="2025-10-01 
Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.739564 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-n2zhx" podStartSLOduration=3.601986858 podStartE2EDuration="1m25.73954318s" podCreationTimestamp="2025-10-01 13:47:09 +0000 UTC" firstStartedPulling="2025-10-01 13:47:11.654495227 +0000 UTC m=+154.398471435" lastFinishedPulling="2025-10-01 13:48:33.792051549 +0000 UTC m=+236.536027757" observedRunningTime="2025-10-01 13:48:34.733571595 +0000 UTC m=+237.477547803" watchObservedRunningTime="2025-10-01 13:48:34.73954318 +0000 UTC m=+237.483519388"
Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.757159 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tlgqx" podStartSLOduration=3.668270334 podStartE2EDuration="1m25.757138205s" podCreationTimestamp="2025-10-01 13:47:09 +0000 UTC" firstStartedPulling="2025-10-01 13:47:11.667644338 +0000 UTC m=+154.411620546" lastFinishedPulling="2025-10-01 13:48:33.756512209 +0000 UTC m=+236.500488417" observedRunningTime="2025-10-01 13:48:34.74764898 +0000 UTC m=+237.491625198" watchObservedRunningTime="2025-10-01 13:48:34.757138205 +0000 UTC m=+237.501114423"
Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.796844 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pcw9k" podStartSLOduration=4.079871225 podStartE2EDuration="1m23.796769732s" podCreationTimestamp="2025-10-01 13:47:11 +0000 UTC" firstStartedPulling="2025-10-01 13:47:13.817990462 +0000 UTC m=+156.561966670" lastFinishedPulling="2025-10-01 13:48:33.534888969 +0000 UTC m=+236.278865177" observedRunningTime="2025-10-01 13:48:34.779958476 +0000 UTC m=+237.523934684" watchObservedRunningTime="2025-10-01 13:48:34.796769732 +0000 UTC m=+237.540745930"
Oct 01 13:48:34 crc kubenswrapper[4605]: I1001 13:48:34.797468 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mh782"]
Oct 01 13:48:35 crc kubenswrapper[4605]: I1001 13:48:35.682528 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xnpnq" event={"ID":"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4","Type":"ContainerStarted","Data":"aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee"}
Oct 01 13:48:35 crc kubenswrapper[4605]: I1001 13:48:35.698116 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xnpnq" podStartSLOduration=4.145084895 podStartE2EDuration="1m25.698074836s" podCreationTimestamp="2025-10-01 13:47:10 +0000 UTC" firstStartedPulling="2025-10-01 13:47:12.780365988 +0000 UTC m=+155.524342196" lastFinishedPulling="2025-10-01 13:48:34.333355929 +0000 UTC m=+237.077332137" observedRunningTime="2025-10-01 13:48:35.697328346 +0000 UTC m=+238.441304554" watchObservedRunningTime="2025-10-01 13:48:35.698074836 +0000 UTC m=+238.442051044"
Oct 01 13:48:36 crc kubenswrapper[4605]: I1001 13:48:36.686074 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mh782" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="registry-server" containerID="cri-o://02fda48d590d7851f332cd531877c860eed61c285a4ed45ee010ef4d68c04274" gracePeriod=2
Oct 01 13:48:37 crc kubenswrapper[4605]: I1001 13:48:37.693409 4605 generic.go:334] "Generic (PLEG): container finished" podID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerID="02fda48d590d7851f332cd531877c860eed61c285a4ed45ee010ef4d68c04274" exitCode=0
Oct 01 13:48:37 crc kubenswrapper[4605]: I1001 13:48:37.693475 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerDied","Data":"02fda48d590d7851f332cd531877c860eed61c285a4ed45ee010ef4d68c04274"}
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.093153 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.208939 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-catalog-content\") pod \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") "
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.208993 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-utilities\") pod \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") "
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.209060 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzs6p\" (UniqueName: \"kubernetes.io/projected/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-kube-api-access-nzs6p\") pod \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\" (UID: \"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051\") "
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.209982 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-utilities" (OuterVolumeSpecName: "utilities") pod "abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" (UID: "abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.217567 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-kube-api-access-nzs6p" (OuterVolumeSpecName: "kube-api-access-nzs6p") pod "abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" (UID: "abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051"). InnerVolumeSpecName "kube-api-access-nzs6p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.298016 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" (UID: "abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.310394 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.310435 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.310445 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzs6p\" (UniqueName: \"kubernetes.io/projected/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051-kube-api-access-nzs6p\") on node \"crc\" DevicePath \"\""
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.702535 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mh782" event={"ID":"abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051","Type":"ContainerDied","Data":"a260fa80b6a438ff74a6c9e163c0c41b8314a474e5c42aa1495e7ddd14358b4a"}
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.702592 4605 scope.go:117] "RemoveContainer" containerID="02fda48d590d7851f332cd531877c860eed61c285a4ed45ee010ef4d68c04274"
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.702669 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mh782"
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.723217 4605 scope.go:117] "RemoveContainer" containerID="1ac5a1b872b977fb888983ee44edc2b9edc7312388dce47ee1c638188be21d24"
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.741194 4605 scope.go:117] "RemoveContainer" containerID="32ccfab0b0c952cfbdae39884ed3466fd736d6c5c9d248552d1a1eff0848cdd7"
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.742792 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mh782"]
Oct 01 13:48:38 crc kubenswrapper[4605]: I1001 13:48:38.746305 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mh782"]
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.150370 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z5nlj"
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.150713 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z5nlj"
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.196782 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z5nlj"
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.275455 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p7h8s"
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.275543 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p7h8s"
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.361815 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p7h8s"
Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.519680 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-n2zhx"
pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.519724 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.560133 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.703107 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.703151 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.739657 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.740029 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.749121 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.751079 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:48:39 crc kubenswrapper[4605]: I1001 13:48:39.932891 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" path="/var/lib/kubelet/pods/abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051/volumes" Oct 01 13:48:40 crc kubenswrapper[4605]: I1001 13:48:40.750891 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.286066 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xnpnq" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.286129 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xnpnq" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.325513 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xnpnq" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.685741 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-z9jm5" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.685791 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-z9jm5" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.725546 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-z9jm5" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.761340 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xnpnq" Oct 01 13:48:41 crc kubenswrapper[4605]: I1001 13:48:41.765741 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-z9jm5" Oct 01 13:48:42 crc 
kubenswrapper[4605]: I1001 13:48:42.280232 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pcw9k" Oct 01 13:48:42 crc kubenswrapper[4605]: I1001 13:48:42.281322 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pcw9k" Oct 01 13:48:42 crc kubenswrapper[4605]: I1001 13:48:42.322730 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pcw9k" Oct 01 13:48:42 crc kubenswrapper[4605]: I1001 13:48:42.758611 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pcw9k" Oct 01 13:48:42 crc kubenswrapper[4605]: I1001 13:48:42.993688 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n2zhx"] Oct 01 13:48:42 crc kubenswrapper[4605]: I1001 13:48:42.993891 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-n2zhx" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="registry-server" containerID="cri-o://c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748" gracePeriod=2 Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.197500 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tlgqx"] Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.198038 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tlgqx" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="registry-server" containerID="cri-o://8b914bd61cf55e56b86fa03ef9f9dd072d858c47c4d726630ed8a4bb4d7c810b" gracePeriod=2 Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.531618 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.682457 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-catalog-content\") pod \"69ee908d-c416-425e-acbf-0b212debb8dc\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.682560 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdz8z\" (UniqueName: \"kubernetes.io/projected/69ee908d-c416-425e-acbf-0b212debb8dc-kube-api-access-sdz8z\") pod \"69ee908d-c416-425e-acbf-0b212debb8dc\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.682583 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-utilities\") pod \"69ee908d-c416-425e-acbf-0b212debb8dc\" (UID: \"69ee908d-c416-425e-acbf-0b212debb8dc\") " Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.683531 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-utilities" (OuterVolumeSpecName: "utilities") pod "69ee908d-c416-425e-acbf-0b212debb8dc" (UID: "69ee908d-c416-425e-acbf-0b212debb8dc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.698242 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69ee908d-c416-425e-acbf-0b212debb8dc-kube-api-access-sdz8z" (OuterVolumeSpecName: "kube-api-access-sdz8z") pod "69ee908d-c416-425e-acbf-0b212debb8dc" (UID: "69ee908d-c416-425e-acbf-0b212debb8dc"). InnerVolumeSpecName "kube-api-access-sdz8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.730788 4605 generic.go:334] "Generic (PLEG): container finished" podID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerID="8b914bd61cf55e56b86fa03ef9f9dd072d858c47c4d726630ed8a4bb4d7c810b" exitCode=0 Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.730879 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlgqx" event={"ID":"6639ba49-1401-4dc4-be03-f3f7bc43145c","Type":"ContainerDied","Data":"8b914bd61cf55e56b86fa03ef9f9dd072d858c47c4d726630ed8a4bb4d7c810b"} Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.732877 4605 generic.go:334] "Generic (PLEG): container finished" podID="69ee908d-c416-425e-acbf-0b212debb8dc" containerID="c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748" exitCode=0 Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.732924 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-n2zhx" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.732965 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n2zhx" event={"ID":"69ee908d-c416-425e-acbf-0b212debb8dc","Type":"ContainerDied","Data":"c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748"} Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.733017 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-n2zhx" event={"ID":"69ee908d-c416-425e-acbf-0b212debb8dc","Type":"ContainerDied","Data":"ed7b6778382991ba795a0e33efcd22804bbac51561afe311d2069f61e6f6030a"} Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.733041 4605 scope.go:117] "RemoveContainer" containerID="c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.743690 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69ee908d-c416-425e-acbf-0b212debb8dc" (UID: "69ee908d-c416-425e-acbf-0b212debb8dc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.749438 4605 scope.go:117] "RemoveContainer" containerID="370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.764708 4605 scope.go:117] "RemoveContainer" containerID="099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.779222 4605 scope.go:117] "RemoveContainer" containerID="c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748" Oct 01 13:48:43 crc kubenswrapper[4605]: E1001 13:48:43.779723 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748\": container with ID starting with c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748 not found: ID does not exist" containerID="c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.779757 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748"} err="failed to get container status \"c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748\": rpc error: code = NotFound desc = could not find container \"c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748\": container with ID starting with c0559d0cb6738105c0ebefc147115f66d3dc0ff2f802349969e9c74b63663748 not found: ID does not exist" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.779778 4605 scope.go:117] "RemoveContainer" containerID="370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e" Oct 01 13:48:43 crc kubenswrapper[4605]: E1001 13:48:43.780176 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e\": container with ID starting with 370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e not found: ID does not exist" containerID="370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.780201 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e"} err="failed to get container status \"370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e\": rpc error: code = NotFound desc = could not find container \"370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e\": container with ID starting with 370201f9b912c0ef098e801c2c4b6e26ecad77865350c632d6b8e58630a0529e not found: ID does not exist" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.780214 4605 scope.go:117] "RemoveContainer" containerID="099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798" Oct 01 13:48:43 crc kubenswrapper[4605]: E1001 13:48:43.780504 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798\": container with ID starting with 099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798 not found: ID does not exist" containerID="099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798" Oct 01 13:48:43 crc 
kubenswrapper[4605]: I1001 13:48:43.780524 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798"} err="failed to get container status \"099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798\": rpc error: code = NotFound desc = could not find container \"099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798\": container with ID starting with 099615b2d8586dacf63fa6e85e621bdf7fd9a60239bbc13517eba1250255b798 not found: ID does not exist" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.784335 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdz8z\" (UniqueName: \"kubernetes.io/projected/69ee908d-c416-425e-acbf-0b212debb8dc-kube-api-access-sdz8z\") on node \"crc\" DevicePath \"\"" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.784355 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:48:43 crc kubenswrapper[4605]: I1001 13:48:43.784367 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ee908d-c416-425e-acbf-0b212debb8dc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.049808 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-n2zhx"] Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.058009 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-n2zhx"] Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.059129 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlgqx" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.107045 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pt28q\" (UniqueName: \"kubernetes.io/projected/6639ba49-1401-4dc4-be03-f3f7bc43145c-kube-api-access-pt28q\") pod \"6639ba49-1401-4dc4-be03-f3f7bc43145c\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.107186 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-utilities\") pod \"6639ba49-1401-4dc4-be03-f3f7bc43145c\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.107234 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-catalog-content\") pod \"6639ba49-1401-4dc4-be03-f3f7bc43145c\" (UID: \"6639ba49-1401-4dc4-be03-f3f7bc43145c\") " Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.108001 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-utilities" (OuterVolumeSpecName: "utilities") pod "6639ba49-1401-4dc4-be03-f3f7bc43145c" (UID: "6639ba49-1401-4dc4-be03-f3f7bc43145c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.110465 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6639ba49-1401-4dc4-be03-f3f7bc43145c-kube-api-access-pt28q" (OuterVolumeSpecName: "kube-api-access-pt28q") pod "6639ba49-1401-4dc4-be03-f3f7bc43145c" (UID: "6639ba49-1401-4dc4-be03-f3f7bc43145c"). InnerVolumeSpecName "kube-api-access-pt28q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.156421 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6639ba49-1401-4dc4-be03-f3f7bc43145c" (UID: "6639ba49-1401-4dc4-be03-f3f7bc43145c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.208845 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.208878 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pt28q\" (UniqueName: \"kubernetes.io/projected/6639ba49-1401-4dc4-be03-f3f7bc43145c-kube-api-access-pt28q\") on node \"crc\" DevicePath \"\"" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.208893 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6639ba49-1401-4dc4-be03-f3f7bc43145c-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.740433 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlgqx" event={"ID":"6639ba49-1401-4dc4-be03-f3f7bc43145c","Type":"ContainerDied","Data":"dd2871f5f0e4486f3ad5aff9487a158a612405fff63628f5037732779f0d5be7"} Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.740489 4605 scope.go:117] "RemoveContainer" containerID="8b914bd61cf55e56b86fa03ef9f9dd072d858c47c4d726630ed8a4bb4d7c810b" Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.740508 4605 util.go:48] "No ready sandbox for pod can be found. 
Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.760322 4605 scope.go:117] "RemoveContainer" containerID="049d3f2796eb2b0a4c356275b4ab83e7d73cd235f1410fb2caf2f4093728afee"
Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.774525 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tlgqx"]
Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.777410 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tlgqx"]
Oct 01 13:48:44 crc kubenswrapper[4605]: I1001 13:48:44.791314 4605 scope.go:117] "RemoveContainer" containerID="db604bd7592f84855d12f40d6cb86d2103956b2a152d9c27f9a60335086f9a55"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.395506 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9jm5"]
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.395711 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-z9jm5" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="registry-server" containerID="cri-o://749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255" gracePeriod=2
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.727473 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.753974 4605 generic.go:334] "Generic (PLEG): container finished" podID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerID="749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255" exitCode=0
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.754046 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z9jm5"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.754197 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9jm5" event={"ID":"ad7c6809-d039-42e8-a1d9-3872a3751ad4","Type":"ContainerDied","Data":"749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255"}
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.754257 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z9jm5" event={"ID":"ad7c6809-d039-42e8-a1d9-3872a3751ad4","Type":"ContainerDied","Data":"aec471d7ee3cc4d42d079311f91894b202bfd3c2c7fc56949baf97599c1768b2"}
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.754293 4605 scope.go:117] "RemoveContainer" containerID="749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.768877 4605 scope.go:117] "RemoveContainer" containerID="a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.784798 4605 scope.go:117] "RemoveContainer" containerID="6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.801394 4605 scope.go:117] "RemoveContainer" containerID="749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255"
Oct 01 13:48:45 crc kubenswrapper[4605]: E1001 13:48:45.801788 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255\": container with ID starting with 749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255 not found: ID does not exist" containerID="749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.801822 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255"} err="failed to get container status \"749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255\": rpc error: code = NotFound desc = could not find container \"749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255\": container with ID starting with 749ea6c326448517b5b62671e90d58a982bdae67ec468f12cf0b1a8dad58f255 not found: ID does not exist"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.801843 4605 scope.go:117] "RemoveContainer" containerID="a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186"
Oct 01 13:48:45 crc kubenswrapper[4605]: E1001 13:48:45.802164 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186\": container with ID starting with a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186 not found: ID does not exist" containerID="a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.802189 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186"} err="failed to get container status \"a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186\": rpc error: code = NotFound desc = could not find container \"a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186\": container with ID starting with a9b15f2b3ae06262259620ceaea72fe7106e7bdef8947d95e46271e5b6312186 not found: ID does not exist"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.802203 4605 scope.go:117] "RemoveContainer" containerID="6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0"
Oct 01 13:48:45 crc kubenswrapper[4605]: E1001 13:48:45.802403 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0\": container with ID starting with 6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0 not found: ID does not exist" containerID="6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.802426 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0"} err="failed to get container status \"6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0\": rpc error: code = NotFound desc = could not find container \"6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0\": container with ID starting with 6daae6576e44f19034d93d29964e5cb82140c0e35c1067a637163c699a179eb0 not found: ID does not exist"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.823043 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7rp6\" (UniqueName: \"kubernetes.io/projected/ad7c6809-d039-42e8-a1d9-3872a3751ad4-kube-api-access-b7rp6\") pod \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") "
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.823110 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-utilities\") pod \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") "
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.823175 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-catalog-content\") pod \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\" (UID: \"ad7c6809-d039-42e8-a1d9-3872a3751ad4\") "
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.825121 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-utilities" (OuterVolumeSpecName: "utilities") pod "ad7c6809-d039-42e8-a1d9-3872a3751ad4" (UID: "ad7c6809-d039-42e8-a1d9-3872a3751ad4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.833279 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad7c6809-d039-42e8-a1d9-3872a3751ad4-kube-api-access-b7rp6" (OuterVolumeSpecName: "kube-api-access-b7rp6") pod "ad7c6809-d039-42e8-a1d9-3872a3751ad4" (UID: "ad7c6809-d039-42e8-a1d9-3872a3751ad4"). InnerVolumeSpecName "kube-api-access-b7rp6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.838501 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad7c6809-d039-42e8-a1d9-3872a3751ad4" (UID: "ad7c6809-d039-42e8-a1d9-3872a3751ad4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.924429 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.924976 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7rp6\" (UniqueName: \"kubernetes.io/projected/ad7c6809-d039-42e8-a1d9-3872a3751ad4-kube-api-access-b7rp6\") on node \"crc\" DevicePath \"\""
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.925062 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7c6809-d039-42e8-a1d9-3872a3751ad4-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.932447 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" path="/var/lib/kubelet/pods/6639ba49-1401-4dc4-be03-f3f7bc43145c/volumes"
Oct 01 13:48:45 crc kubenswrapper[4605]: I1001 13:48:45.933034 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" path="/var/lib/kubelet/pods/69ee908d-c416-425e-acbf-0b212debb8dc/volumes"
Oct 01 13:48:46 crc kubenswrapper[4605]: I1001 13:48:46.074006 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9jm5"]
Oct 01 13:48:46 crc kubenswrapper[4605]: I1001 13:48:46.077568 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-z9jm5"]
Oct 01 13:48:47 crc kubenswrapper[4605]: I1001 13:48:47.937773 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" path="/var/lib/kubelet/pods/ad7c6809-d039-42e8-a1d9-3872a3751ad4/volumes"
Oct 01 13:48:59 crc kubenswrapper[4605]: I1001 13:48:59.724432 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-trd6j"]
Oct 01 13:49:24 crc kubenswrapper[4605]: I1001 13:49:24.753371 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift" containerID="cri-o://c887456e4902033c6f73330006a16d65ecd4b64d7f8f191c2772990db88e0747" gracePeriod=15
Oct 01 13:49:24 crc kubenswrapper[4605]: I1001 13:49:24.950735 4605 generic.go:334] "Generic (PLEG): container finished" podID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerID="c887456e4902033c6f73330006a16d65ecd4b64d7f8f191c2772990db88e0747" exitCode=0
Oct 01 13:49:24 crc kubenswrapper[4605]: I1001 13:49:24.950934 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" event={"ID":"7f6de1d9-61b5-4cc6-a820-5492052b60ef","Type":"ContainerDied","Data":"c887456e4902033c6f73330006a16d65ecd4b64d7f8f191c2772990db88e0747"}
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.158048 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206154 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q"]
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206414 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23fa8712-77a3-4af4-95cb-827936ad280a" containerName="pruner"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206430 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="23fa8712-77a3-4af4-95cb-827936ad280a" containerName="pruner"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206446 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206454 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206465 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206473 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206483 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206491 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206501 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206508 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206516 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206525 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206536 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206544 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206552 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206560 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206571 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bad96939-62fd-4f1d-975f-1de2d5a868c4" containerName="pruner"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206579 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="bad96939-62fd-4f1d-975f-1de2d5a868c4" containerName="pruner"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206593 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206601 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206613 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206620 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206629 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206636 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206646 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206655 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206665 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206672 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="extract-content"
Oct 01 13:49:25 crc kubenswrapper[4605]: E1001 13:49:25.206683 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206692 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="extract-utilities"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206799 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad7c6809-d039-42e8-a1d9-3872a3751ad4" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206814 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="bad96939-62fd-4f1d-975f-1de2d5a868c4" containerName="pruner"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206826 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf8ee39-bbbb-4d2a-9503-ed6b2ecbf051" containerName="registry-server"
Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206833 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" containerName="oauth-openshift"
containerName="oauth-openshift" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206843 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="23fa8712-77a3-4af4-95cb-827936ad280a" containerName="pruner" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206857 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="6639ba49-1401-4dc4-be03-f3f7bc43145c" containerName="registry-server" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.206869 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="69ee908d-c416-425e-acbf-0b212debb8dc" containerName="registry-server" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.207325 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.210356 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q"] Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283643 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-policies\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283716 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-provider-selection\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283769 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-error\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283834 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-session\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283907 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-idp-0-file-data\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283938 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7szsr\" (UniqueName: \"kubernetes.io/projected/7f6de1d9-61b5-4cc6-a820-5492052b60ef-kube-api-access-7szsr\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.283975 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-router-certs\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284016 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-serving-cert\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284073 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-trusted-ca-bundle\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284166 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-service-ca\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284207 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-cliconfig\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284250 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-ocp-branding-template\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284307 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-login\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284338 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-dir\") pod \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\" (UID: \"7f6de1d9-61b5-4cc6-a820-5492052b60ef\") " Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.284691 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.285810 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.285849 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.287942 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.290788 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.300325 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.300492 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.301051 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.301505 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.301615 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.302165 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.302304 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.307772 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.308960 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f6de1d9-61b5-4cc6-a820-5492052b60ef-kube-api-access-7szsr" (OuterVolumeSpecName: "kube-api-access-7szsr") pod "7f6de1d9-61b5-4cc6-a820-5492052b60ef" (UID: "7f6de1d9-61b5-4cc6-a820-5492052b60ef"). InnerVolumeSpecName "kube-api-access-7szsr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.385973 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386047 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386073 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386132 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-audit-policies\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386171 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01fbfa95-5d03-4421-8d7b-f5cd5f500236-audit-dir\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386420 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386479 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386559 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386593 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml9kj\" (UniqueName: \"kubernetes.io/projected/01fbfa95-5d03-4421-8d7b-f5cd5f500236-kube-api-access-ml9kj\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386649 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386668 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386788 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386867 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.386936 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387044 4605 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387069 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387081 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387106 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387116 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387125 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7szsr\" (UniqueName: \"kubernetes.io/projected/7f6de1d9-61b5-4cc6-a820-5492052b60ef-kube-api-access-7szsr\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387135 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387144 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387154 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387163 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387172 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387180 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387192 4605 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7f6de1d9-61b5-4cc6-a820-5492052b60ef-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.387204 4605 reconciler_common.go:293] "Volume detached for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f6de1d9-61b5-4cc6-a820-5492052b60ef-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488189 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488231 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488257 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488284 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488302 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488321 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488348 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-audit-policies\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488377 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01fbfa95-5d03-4421-8d7b-f5cd5f500236-audit-dir\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: 
\"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488408 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488432 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488463 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488484 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml9kj\" (UniqueName: \"kubernetes.io/projected/01fbfa95-5d03-4421-8d7b-f5cd5f500236-kube-api-access-ml9kj\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488516 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.488536 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.489858 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.490756 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-audit-policies\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " 
pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.491580 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.491820 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.492206 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01fbfa95-5d03-4421-8d7b-f5cd5f500236-audit-dir\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.492589 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.493068 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.493403 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.493453 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.494635 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: 
I1001 13:49:25.494927 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.495023 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.498160 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01fbfa95-5d03-4421-8d7b-f5cd5f500236-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.508589 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml9kj\" (UniqueName: \"kubernetes.io/projected/01fbfa95-5d03-4421-8d7b-f5cd5f500236-kube-api-access-ml9kj\") pod \"oauth-openshift-7fb5d9b995-rdm4q\" (UID: \"01fbfa95-5d03-4421-8d7b-f5cd5f500236\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.525364 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.726176 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q"] Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.957310 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" event={"ID":"01fbfa95-5d03-4421-8d7b-f5cd5f500236","Type":"ContainerStarted","Data":"0c602f5a41d847cc317c3ce034dad4cb72f1ea31dec29f84f2a1f68c4c750bc5"} Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.959775 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" event={"ID":"7f6de1d9-61b5-4cc6-a820-5492052b60ef","Type":"ContainerDied","Data":"410f3596f784a7a1c2a185af945f5ce66d7471fd5c4873925db7de33891632a9"} Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.959872 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-trd6j" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.960040 4605 scope.go:117] "RemoveContainer" containerID="c887456e4902033c6f73330006a16d65ecd4b64d7f8f191c2772990db88e0747" Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.979119 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-trd6j"] Oct 01 13:49:25 crc kubenswrapper[4605]: I1001 13:49:25.982168 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-trd6j"] Oct 01 13:49:26 crc kubenswrapper[4605]: I1001 13:49:26.967718 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" event={"ID":"01fbfa95-5d03-4421-8d7b-f5cd5f500236","Type":"ContainerStarted","Data":"0dc33191a2bac457daf10d387ee33ab63eb0506d45ee83fd663f05a42c137f17"} Oct 01 13:49:26 crc kubenswrapper[4605]: I1001 13:49:26.967830 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:26 crc kubenswrapper[4605]: I1001 13:49:26.973649 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" Oct 01 13:49:26 crc kubenswrapper[4605]: I1001 13:49:26.994777 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7fb5d9b995-rdm4q" podStartSLOduration=27.9947408 podStartE2EDuration="27.9947408s" podCreationTimestamp="2025-10-01 13:48:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:49:26.98702912 +0000 UTC m=+289.731005338" watchObservedRunningTime="2025-10-01 13:49:26.9947408 +0000 UTC m=+289.738717038" Oct 01 13:49:27 crc kubenswrapper[4605]: I1001 13:49:27.934157 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f6de1d9-61b5-4cc6-a820-5492052b60ef" path="/var/lib/kubelet/pods/7f6de1d9-61b5-4cc6-a820-5492052b60ef/volumes" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.161770 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z5nlj"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.162542 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z5nlj" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="registry-server" containerID="cri-o://1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3" gracePeriod=30 Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.168993 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7h8s"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.172398 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p7h8s" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="registry-server" containerID="cri-o://1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2" gracePeriod=30 Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.183072 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hv7vm"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.183297 4605 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" containerID="cri-o://8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c" gracePeriod=30 Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.198944 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xnpnq"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.199646 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xnpnq" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="registry-server" containerID="cri-o://aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee" gracePeriod=30 Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.209052 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pcw9k"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.209483 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pcw9k" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="registry-server" containerID="cri-o://b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f" gracePeriod=30 Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.238290 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fhshk"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.239634 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.259951 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fhshk"] Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.433238 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtcb7\" (UniqueName: \"kubernetes.io/projected/78062175-5452-4b18-96df-c602188693fb-kube-api-access-rtcb7\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.433310 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/78062175-5452-4b18-96df-c602188693fb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.437646 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/78062175-5452-4b18-96df-c602188693fb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.538582 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/78062175-5452-4b18-96df-c602188693fb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.538630 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/78062175-5452-4b18-96df-c602188693fb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.538699 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtcb7\" (UniqueName: \"kubernetes.io/projected/78062175-5452-4b18-96df-c602188693fb-kube-api-access-rtcb7\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.540528 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/78062175-5452-4b18-96df-c602188693fb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.545239 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/78062175-5452-4b18-96df-c602188693fb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.556244 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtcb7\" (UniqueName: \"kubernetes.io/projected/78062175-5452-4b18-96df-c602188693fb-kube-api-access-rtcb7\") pod \"marketplace-operator-79b997595-fhshk\" (UID: \"78062175-5452-4b18-96df-c602188693fb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.621410 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.694961 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xnpnq" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.706601 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.707443 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.722462 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pcw9k" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.744522 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-trusted-ca\") pod \"9317edb7-8cf8-4045-95a9-196349d21055\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.744609 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-operator-metrics\") pod \"9317edb7-8cf8-4045-95a9-196349d21055\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.744670 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dp4l\" (UniqueName: \"kubernetes.io/projected/9317edb7-8cf8-4045-95a9-196349d21055-kube-api-access-9dp4l\") pod \"9317edb7-8cf8-4045-95a9-196349d21055\" (UID: \"9317edb7-8cf8-4045-95a9-196349d21055\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.749026 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9317edb7-8cf8-4045-95a9-196349d21055" (UID: "9317edb7-8cf8-4045-95a9-196349d21055"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.754148 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9317edb7-8cf8-4045-95a9-196349d21055-kube-api-access-9dp4l" (OuterVolumeSpecName: "kube-api-access-9dp4l") pod "9317edb7-8cf8-4045-95a9-196349d21055" (UID: "9317edb7-8cf8-4045-95a9-196349d21055"). InnerVolumeSpecName "kube-api-access-9dp4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.758190 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9317edb7-8cf8-4045-95a9-196349d21055" (UID: "9317edb7-8cf8-4045-95a9-196349d21055"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.807752 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.846474 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmqsf\" (UniqueName: \"kubernetes.io/projected/fd03b5be-380c-4bc8-aea1-2467ebe8a390-kube-api-access-xmqsf\") pod \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.847362 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-utilities\") pod \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.847470 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-utilities\") pod \"370d45c2-c748-4333-bbf5-9f2767f225f5\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.847570 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-utilities\") pod \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.847687 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-catalog-content\") pod \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.847799 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-catalog-content\") pod \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.847904 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-catalog-content\") pod \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\" (UID: \"fd03b5be-380c-4bc8-aea1-2467ebe8a390\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.848003 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqn7v\" (UniqueName: \"kubernetes.io/projected/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-kube-api-access-hqn7v\") pod \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.848127 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-catalog-content\") pod \"370d45c2-c748-4333-bbf5-9f2767f225f5\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.848245 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hzng\" (UniqueName: \"kubernetes.io/projected/370d45c2-c748-4333-bbf5-9f2767f225f5-kube-api-access-4hzng\") 
pod \"370d45c2-c748-4333-bbf5-9f2767f225f5\" (UID: \"370d45c2-c748-4333-bbf5-9f2767f225f5\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.848362 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qfhb\" (UniqueName: \"kubernetes.io/projected/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-kube-api-access-6qfhb\") pod \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\" (UID: \"877e04cf-b58e-470f-adc7-8f7fdb0ccddb\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.848748 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-utilities\") pod \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\" (UID: \"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4\") " Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.849139 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dp4l\" (UniqueName: \"kubernetes.io/projected/9317edb7-8cf8-4045-95a9-196349d21055-kube-api-access-9dp4l\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.849666 4605 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.849701 4605 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9317edb7-8cf8-4045-95a9-196349d21055-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.850577 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-utilities" (OuterVolumeSpecName: "utilities") pod "370d45c2-c748-4333-bbf5-9f2767f225f5" (UID: "370d45c2-c748-4333-bbf5-9f2767f225f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.850776 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-utilities" (OuterVolumeSpecName: "utilities") pod "6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" (UID: "6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.851535 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-utilities" (OuterVolumeSpecName: "utilities") pod "877e04cf-b58e-470f-adc7-8f7fdb0ccddb" (UID: "877e04cf-b58e-470f-adc7-8f7fdb0ccddb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.852727 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-kube-api-access-6qfhb" (OuterVolumeSpecName: "kube-api-access-6qfhb") pod "877e04cf-b58e-470f-adc7-8f7fdb0ccddb" (UID: "877e04cf-b58e-470f-adc7-8f7fdb0ccddb"). InnerVolumeSpecName "kube-api-access-6qfhb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.853769 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/370d45c2-c748-4333-bbf5-9f2767f225f5-kube-api-access-4hzng" (OuterVolumeSpecName: "kube-api-access-4hzng") pod "370d45c2-c748-4333-bbf5-9f2767f225f5" (UID: "370d45c2-c748-4333-bbf5-9f2767f225f5"). InnerVolumeSpecName "kube-api-access-4hzng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.862190 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-kube-api-access-hqn7v" (OuterVolumeSpecName: "kube-api-access-hqn7v") pod "6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" (UID: "6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4"). InnerVolumeSpecName "kube-api-access-hqn7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.862527 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" (UID: "6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.865245 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd03b5be-380c-4bc8-aea1-2467ebe8a390-kube-api-access-xmqsf" (OuterVolumeSpecName: "kube-api-access-xmqsf") pod "fd03b5be-380c-4bc8-aea1-2467ebe8a390" (UID: "fd03b5be-380c-4bc8-aea1-2467ebe8a390"). InnerVolumeSpecName "kube-api-access-xmqsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.870218 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-utilities" (OuterVolumeSpecName: "utilities") pod "fd03b5be-380c-4bc8-aea1-2467ebe8a390" (UID: "fd03b5be-380c-4bc8-aea1-2467ebe8a390"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.926263 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd03b5be-380c-4bc8-aea1-2467ebe8a390" (UID: "fd03b5be-380c-4bc8-aea1-2467ebe8a390"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.930194 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "370d45c2-c748-4333-bbf5-9f2767f225f5" (UID: "370d45c2-c748-4333-bbf5-9f2767f225f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.951615 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.951845 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmqsf\" (UniqueName: \"kubernetes.io/projected/fd03b5be-380c-4bc8-aea1-2467ebe8a390-kube-api-access-xmqsf\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.951927 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952024 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952110 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952204 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952291 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd03b5be-380c-4bc8-aea1-2467ebe8a390-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952356 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqn7v\" (UniqueName: \"kubernetes.io/projected/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4-kube-api-access-hqn7v\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952422 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370d45c2-c748-4333-bbf5-9f2767f225f5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952492 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hzng\" (UniqueName: \"kubernetes.io/projected/370d45c2-c748-4333-bbf5-9f2767f225f5-kube-api-access-4hzng\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.952556 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qfhb\" (UniqueName: \"kubernetes.io/projected/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-kube-api-access-6qfhb\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:55 crc kubenswrapper[4605]: I1001 13:49:55.964664 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "877e04cf-b58e-470f-adc7-8f7fdb0ccddb" (UID: "877e04cf-b58e-470f-adc7-8f7fdb0ccddb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.054250 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877e04cf-b58e-470f-adc7-8f7fdb0ccddb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.139854 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerID="1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2" exitCode=0 Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.139907 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7h8s" event={"ID":"fd03b5be-380c-4bc8-aea1-2467ebe8a390","Type":"ContainerDied","Data":"1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.139948 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7h8s" event={"ID":"fd03b5be-380c-4bc8-aea1-2467ebe8a390","Type":"ContainerDied","Data":"d7ee2a35530ef79df850dfe17e834aafaa957ec139c177604d3df761ea318428"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.139966 4605 scope.go:117] "RemoveContainer" containerID="1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.139990 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7h8s" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.143707 4605 generic.go:334] "Generic (PLEG): container finished" podID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerID="b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f" exitCode=0 Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.143762 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcw9k" event={"ID":"877e04cf-b58e-470f-adc7-8f7fdb0ccddb","Type":"ContainerDied","Data":"b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.143797 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcw9k" event={"ID":"877e04cf-b58e-470f-adc7-8f7fdb0ccddb","Type":"ContainerDied","Data":"09cc7399d65e74604846511f1e51791d8f006c44d91382361bda0852951b1376"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.143872 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pcw9k" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.146891 4605 generic.go:334] "Generic (PLEG): container finished" podID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerID="aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee" exitCode=0 Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.146968 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xnpnq" event={"ID":"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4","Type":"ContainerDied","Data":"aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.146999 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xnpnq" event={"ID":"6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4","Type":"ContainerDied","Data":"a1ae50b660c1539bd2cea95d8e3326e87ebbf043ce04dfa25b021164d6987def"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.147207 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xnpnq" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.151167 4605 generic.go:334] "Generic (PLEG): container finished" podID="9317edb7-8cf8-4045-95a9-196349d21055" containerID="8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c" exitCode=0 Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.151249 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.151257 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" event={"ID":"9317edb7-8cf8-4045-95a9-196349d21055","Type":"ContainerDied","Data":"8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.151370 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hv7vm" event={"ID":"9317edb7-8cf8-4045-95a9-196349d21055","Type":"ContainerDied","Data":"5d075d7a237a127cea87374eed17f67c54cf153d8e443cc6ecdfcb7c1a4da08d"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.158543 4605 generic.go:334] "Generic (PLEG): container finished" podID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerID="1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3" exitCode=0 Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.158588 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5nlj" event={"ID":"370d45c2-c748-4333-bbf5-9f2767f225f5","Type":"ContainerDied","Data":"1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.158622 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5nlj" event={"ID":"370d45c2-c748-4333-bbf5-9f2767f225f5","Type":"ContainerDied","Data":"fe2149bc7fa24528c87436a854f70b26eebc9f9e5760ea88d8079fa386064b41"} Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.158730 4605 scope.go:117] "RemoveContainer" containerID="3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.158733 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z5nlj" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.173952 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7h8s"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.178624 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p7h8s"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.191681 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hv7vm"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.196913 4605 scope.go:117] "RemoveContainer" containerID="ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.198551 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hv7vm"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.207997 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z5nlj"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.216660 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z5nlj"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.231179 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pcw9k"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.232715 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pcw9k"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.239903 4605 scope.go:117] "RemoveContainer" containerID="1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2" Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.240882 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2\": container with ID starting with 1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2 not found: ID does not exist" containerID="1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.240928 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2"} err="failed to get container status \"1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2\": rpc error: code = NotFound desc = could not find container \"1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2\": container with ID starting with 1f3bf5d2ba2aa154a818d8932d87b9ec7a3de3281efc343141b81494db058af2 not found: ID does not exist" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.240961 4605 scope.go:117] "RemoveContainer" containerID="3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab" Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.242999 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab\": container with ID starting with 3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab not found: ID does not exist" containerID="3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab" Oct 01 13:49:56 
crc kubenswrapper[4605]: I1001 13:49:56.243033 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab"} err="failed to get container status \"3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab\": rpc error: code = NotFound desc = could not find container \"3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab\": container with ID starting with 3966dec7aff9f90cd2b367ecd82d1a71628e7c733f6ae5ac2fd9e443bd86a3ab not found: ID does not exist" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.243059 4605 scope.go:117] "RemoveContainer" containerID="ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc" Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.243415 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc\": container with ID starting with ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc not found: ID does not exist" containerID="ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.243444 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc"} err="failed to get container status \"ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc\": rpc error: code = NotFound desc = could not find container \"ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc\": container with ID starting with ffed4ee6a730dc72dc7f028320b8146a2c83b8126433e3c9f25d87fa4814d2cc not found: ID does not exist" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.243461 4605 scope.go:117] "RemoveContainer" containerID="b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.247353 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xnpnq"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.251173 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xnpnq"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.261260 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fhshk"] Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.261748 4605 scope.go:117] "RemoveContainer" containerID="3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.276916 4605 scope.go:117] "RemoveContainer" containerID="71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408" Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.300331 4605 scope.go:117] "RemoveContainer" containerID="b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f" Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.301489 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f\": container with ID starting with b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f not found: ID does not exist" containerID="b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f" Oct 01 13:49:56 crc 
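[Editor's note] Each E1001 "ContainerStatus from runtime service failed ... NotFound" above is kubelet re-issuing RemoveContainer for an ID the runtime has already garbage-collected, so the following "DeleteContainer returned error" is effectively benign. A common way to express this race is to treat NotFound as a successful deletion; a sketch under that assumption, using a hypothetical in-memory runtime rather than the real CRI client:

```go
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("NotFound: ID does not exist")

// runtime is a toy stand-in for the container runtime's view of live containers.
type runtime struct{ containers map[string]bool }

// containerStatus fails with NotFound once the ID is gone, like the log's
// "could not find container" errors.
func (r *runtime) containerStatus(id string) error {
	if !r.containers[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	return nil
}

// removeContainer is idempotent: a NotFound status means the container is
// already gone, so the deletion is treated as complete rather than failed.
func removeContainer(r *runtime, id string) error {
	if err := r.containerStatus(id); errors.Is(err, errNotFound) {
		return nil
	}
	delete(r.containers, id)
	return nil
}

func main() {
	r := &runtime{containers: map[string]bool{}}
	fmt.Println(removeContainer(r, "1f3bf5d2")) // <nil>: NotFound treated as success
}
```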
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.301528 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f"} err="failed to get container status \"b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f\": rpc error: code = NotFound desc = could not find container \"b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f\": container with ID starting with b56ae2e3187ee97f84412a6883329488752ab96662194205ee5d9129feda5e1f not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.301552 4605 scope.go:117] "RemoveContainer" containerID="3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.301816 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe\": container with ID starting with 3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe not found: ID does not exist" containerID="3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.301842 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe"} err="failed to get container status \"3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe\": rpc error: code = NotFound desc = could not find container \"3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe\": container with ID starting with 3455e2323e0b7293b216dc6b4f3798bcbd195fd6416ed9cc07207b5704edfafe not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.301857 4605 scope.go:117] "RemoveContainer" containerID="71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.302483 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408\": container with ID starting with 71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408 not found: ID does not exist" containerID="71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.302539 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408"} err="failed to get container status \"71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408\": rpc error: code = NotFound desc = could not find container \"71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408\": container with ID starting with 71748170473e3b9e4ef254ae21ee6dcafa847c4b275cf3df8038a68742fa1408 not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.302572 4605 scope.go:117] "RemoveContainer" containerID="aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.361653 4605 scope.go:117] "RemoveContainer" containerID="5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.379416 4605 scope.go:117] "RemoveContainer" containerID="491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.407322 4605 scope.go:117] "RemoveContainer" containerID="aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.407684 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee\": container with ID starting with aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee not found: ID does not exist" containerID="aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.407714 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee"} err="failed to get container status \"aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee\": rpc error: code = NotFound desc = could not find container \"aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee\": container with ID starting with aa7d9d882a3894a1c691b1d18baa45158df6e76aacbd5000e3d225295f609eee not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.407741 4605 scope.go:117] "RemoveContainer" containerID="5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.408180 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1\": container with ID starting with 5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1 not found: ID does not exist" containerID="5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.408201 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1"} err="failed to get container status \"5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1\": rpc error: code = NotFound desc = could not find container \"5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1\": container with ID starting with 5bad6e862677d8f2643561897b8598523b01d56382f59800064637be694858b1 not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.408217 4605 scope.go:117] "RemoveContainer" containerID="491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.409256 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa\": container with ID starting with 491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa not found: ID does not exist" containerID="491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.409291 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa"} err="failed to get container status \"491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa\": rpc error: code = NotFound desc = could not find container \"491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa\": container with ID starting with 491e68479872bb407b194aa011b816646f5a89a27b0d8eb9a873ce2e1ea967fa not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.409308 4605 scope.go:117] "RemoveContainer" containerID="8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.433293 4605 scope.go:117] "RemoveContainer" containerID="8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.433963 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c\": container with ID starting with 8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c not found: ID does not exist" containerID="8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.434022 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c"} err="failed to get container status \"8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c\": rpc error: code = NotFound desc = could not find container \"8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c\": container with ID starting with 8e6d09105b00404725a167134f3a289849f386c047d04b9b24ca50f6884d980c not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.434048 4605 scope.go:117] "RemoveContainer" containerID="1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.447322 4605 scope.go:117] "RemoveContainer" containerID="133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.459749 4605 scope.go:117] "RemoveContainer" containerID="40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.482044 4605 scope.go:117] "RemoveContainer" containerID="1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.482476 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3\": container with ID starting with 1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3 not found: ID does not exist" containerID="1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.482501 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3"} err="failed to get container status \"1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3\": rpc error: code = NotFound desc = could not find container \"1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3\": container with ID starting with 1f796fbd2b97c4de216532222df4369dd6bffcf4e02bf7bce4be5de98d0455c3 not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.482522 4605 scope.go:117] "RemoveContainer" containerID="133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.482905 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d\": container with ID starting with 133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d not found: ID does not exist" containerID="133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.482925 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d"} err="failed to get container status \"133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d\": rpc error: code = NotFound desc = could not find container \"133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d\": container with ID starting with 133457b2c528604fff5e2bdd4b5d023bb8dac2116a71c5363cacc5e46ae7283d not found: ID does not exist"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.482953 4605 scope.go:117] "RemoveContainer" containerID="40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10"
Oct 01 13:49:56 crc kubenswrapper[4605]: E1001 13:49:56.483260 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10\": container with ID starting with 40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10 not found: ID does not exist" containerID="40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10"
Oct 01 13:49:56 crc kubenswrapper[4605]: I1001 13:49:56.483280 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10"} err="failed to get container status \"40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10\": rpc error: code = NotFound desc = could not find container \"40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10\": container with ID starting with 40891b62860c6f16833eed3299afba11b7486b5f1565ceff8dfe731bf1245a10 not found: ID does not exist"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.168804 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" event={"ID":"78062175-5452-4b18-96df-c602188693fb","Type":"ContainerStarted","Data":"3656f6aaf360235f3467c6f2d2a9ce8a3f586f6db7cf2c079218e5fac9d7aeed"}
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.168853 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" event={"ID":"78062175-5452-4b18-96df-c602188693fb","Type":"ContainerStarted","Data":"e3f6b658dd8434ce56fe54a7074786646980e13f4c260674d05001996483eb4e"}
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.170135 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.174530 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.189521 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" podStartSLOduration=2.189497099 podStartE2EDuration="2.189497099s" podCreationTimestamp="2025-10-01 13:49:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:49:57.188228726 +0000 UTC m=+319.932204944" watchObservedRunningTime="2025-10-01 13:49:57.189497099 +0000 UTC m=+319.933473327"
"Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fhshk" podStartSLOduration=2.189497099 podStartE2EDuration="2.189497099s" podCreationTimestamp="2025-10-01 13:49:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:49:57.188228726 +0000 UTC m=+319.932204944" watchObservedRunningTime="2025-10-01 13:49:57.189497099 +0000 UTC m=+319.933473327" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375398 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xgctj"] Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375873 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375887 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375898 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375904 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375916 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375922 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375930 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375936 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375945 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375951 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375960 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375967 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375976 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375982 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.375991 4605 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.375996 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.376004 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376009 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.376020 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376025 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="extract-utilities" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.376033 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376038 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.376047 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376052 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="extract-content" Oct 01 13:49:57 crc kubenswrapper[4605]: E1001 13:49:57.376061 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376066 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376159 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376170 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376178 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9317edb7-8cf8-4045-95a9-196349d21055" containerName="marketplace-operator" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376186 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376194 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" containerName="registry-server" Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.376851 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.380718 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.393430 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgctj"]
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.470235 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5pvb\" (UniqueName: \"kubernetes.io/projected/6a5db63d-46d4-4967-b16e-5ee3222617d0-kube-api-access-v5pvb\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.470304 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a5db63d-46d4-4967-b16e-5ee3222617d0-utilities\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.470394 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a5db63d-46d4-4967-b16e-5ee3222617d0-catalog-content\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.572260 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a5db63d-46d4-4967-b16e-5ee3222617d0-utilities\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.572320 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a5db63d-46d4-4967-b16e-5ee3222617d0-catalog-content\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.572362 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5pvb\" (UniqueName: \"kubernetes.io/projected/6a5db63d-46d4-4967-b16e-5ee3222617d0-kube-api-access-v5pvb\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.572928 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a5db63d-46d4-4967-b16e-5ee3222617d0-utilities\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.573027 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a5db63d-46d4-4967-b16e-5ee3222617d0-catalog-content\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.578010 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fw6xk"]
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.579203 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.583756 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.587253 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fw6xk"]
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.595711 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5pvb\" (UniqueName: \"kubernetes.io/projected/6a5db63d-46d4-4967-b16e-5ee3222617d0-kube-api-access-v5pvb\") pod \"redhat-marketplace-xgctj\" (UID: \"6a5db63d-46d4-4967-b16e-5ee3222617d0\") " pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.672666 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh9r9\" (UniqueName: \"kubernetes.io/projected/d5682155-2c45-4654-b77a-75760c61c945-kube-api-access-lh9r9\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.672729 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5682155-2c45-4654-b77a-75760c61c945-catalog-content\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.672773 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5682155-2c45-4654-b77a-75760c61c945-utilities\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.698160 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgctj"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.787991 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5682155-2c45-4654-b77a-75760c61c945-catalog-content\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.788171 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5682155-2c45-4654-b77a-75760c61c945-utilities\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.788289 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh9r9\" (UniqueName: \"kubernetes.io/projected/d5682155-2c45-4654-b77a-75760c61c945-kube-api-access-lh9r9\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.788534 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5682155-2c45-4654-b77a-75760c61c945-catalog-content\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.788761 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5682155-2c45-4654-b77a-75760c61c945-utilities\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.806283 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh9r9\" (UniqueName: \"kubernetes.io/projected/d5682155-2c45-4654-b77a-75760c61c945-kube-api-access-lh9r9\") pod \"redhat-operators-fw6xk\" (UID: \"d5682155-2c45-4654-b77a-75760c61c945\") " pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.922389 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fw6xk"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.932356 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="370d45c2-c748-4333-bbf5-9f2767f225f5" path="/var/lib/kubelet/pods/370d45c2-c748-4333-bbf5-9f2767f225f5/volumes"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.933583 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4" path="/var/lib/kubelet/pods/6b4dc6f5-28e8-4c4e-bd69-59fa8a1d60a4/volumes"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.934323 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="877e04cf-b58e-470f-adc7-8f7fdb0ccddb" path="/var/lib/kubelet/pods/877e04cf-b58e-470f-adc7-8f7fdb0ccddb/volumes"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.935544 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9317edb7-8cf8-4045-95a9-196349d21055" path="/var/lib/kubelet/pods/9317edb7-8cf8-4045-95a9-196349d21055/volumes"
Oct 01 13:49:57 crc kubenswrapper[4605]: I1001 13:49:57.936206 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd03b5be-380c-4bc8-aea1-2467ebe8a390" path="/var/lib/kubelet/pods/fd03b5be-380c-4bc8-aea1-2467ebe8a390/volumes"
Oct 01 13:49:58 crc kubenswrapper[4605]: I1001 13:49:58.083725 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fw6xk"]
Oct 01 13:49:58 crc kubenswrapper[4605]: W1001 13:49:58.093880 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5682155_2c45_4654_b77a_75760c61c945.slice/crio-53c9d13dcaf970014e08f7deaf77c7e8029424891c2dd88d09cf84df3b24bec6 WatchSource:0}: Error finding container 53c9d13dcaf970014e08f7deaf77c7e8029424891c2dd88d09cf84df3b24bec6: Status 404 returned error can't find the container with id 53c9d13dcaf970014e08f7deaf77c7e8029424891c2dd88d09cf84df3b24bec6
Oct 01 13:49:58 crc kubenswrapper[4605]: I1001 13:49:58.105557 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgctj"]
Oct 01 13:49:58 crc kubenswrapper[4605]: W1001 13:49:58.113677 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a5db63d_46d4_4967_b16e_5ee3222617d0.slice/crio-a65565fd0ee37400ab60cd0ed6ef67e7e38d3a9de53ab371b93768b74bd16498 WatchSource:0}: Error finding container a65565fd0ee37400ab60cd0ed6ef67e7e38d3a9de53ab371b93768b74bd16498: Status 404 returned error can't find the container with id a65565fd0ee37400ab60cd0ed6ef67e7e38d3a9de53ab371b93768b74bd16498
Oct 01 13:49:58 crc kubenswrapper[4605]: I1001 13:49:58.177038 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw6xk" event={"ID":"d5682155-2c45-4654-b77a-75760c61c945","Type":"ContainerStarted","Data":"53c9d13dcaf970014e08f7deaf77c7e8029424891c2dd88d09cf84df3b24bec6"}
Oct 01 13:49:58 crc kubenswrapper[4605]: I1001 13:49:58.179489 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgctj" event={"ID":"6a5db63d-46d4-4967-b16e-5ee3222617d0","Type":"ContainerStarted","Data":"a65565fd0ee37400ab60cd0ed6ef67e7e38d3a9de53ab371b93768b74bd16498"}
Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.186053 4605 generic.go:334] "Generic (PLEG): container finished" podID="d5682155-2c45-4654-b77a-75760c61c945" containerID="990ba9ab015521aa1684547debb47ba9142fe90712e840eeb290fdf9c912d349" exitCode=0
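[Editor's note] The 404 warnings above come from cAdvisor trying to watch a cgroup for a container that raced away during creation; the "SyncLoop (PLEG)" lines are the kubelet's relisting turning runtime state changes into typed events for the sync loop. A sketch of that event shape, mirroring the ID/Type/Data fields printed in the log (names illustrative, not kubelet's actual types):

```go
package main

import "fmt"

// podLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...} shape
// in the SyncLoop (PLEG) lines above.
type podLifecycleEvent struct {
	ID   string // pod UID
	Type string // e.g. "ContainerStarted", "ContainerDied"
	Data string // container or sandbox ID
}

func main() {
	// The PLEG relister would feed this channel; the sync loop drains it.
	events := make(chan podLifecycleEvent, 2)
	events <- podLifecycleEvent{"d5682155-2c45-4654-b77a-75760c61c945", "ContainerStarted", "53c9d13dcaf9"}
	events <- podLifecycleEvent{"d5682155-2c45-4654-b77a-75760c61c945", "ContainerDied", "990ba9ab0155"}
	close(events)
	for e := range events {
		fmt.Printf("SyncLoop (PLEG): event for pod ID=%s Type=%s Data=%s\n", e.ID, e.Type, e.Data)
	}
}
```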
podID="d5682155-2c45-4654-b77a-75760c61c945" containerID="990ba9ab015521aa1684547debb47ba9142fe90712e840eeb290fdf9c912d349" exitCode=0 Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.186129 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw6xk" event={"ID":"d5682155-2c45-4654-b77a-75760c61c945","Type":"ContainerDied","Data":"990ba9ab015521aa1684547debb47ba9142fe90712e840eeb290fdf9c912d349"} Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.193021 4605 generic.go:334] "Generic (PLEG): container finished" podID="6a5db63d-46d4-4967-b16e-5ee3222617d0" containerID="82e71421c176c08b3e6841f91a9158dd566d144087cb682a9be4637380c9b1b3" exitCode=0 Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.193216 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgctj" event={"ID":"6a5db63d-46d4-4967-b16e-5ee3222617d0","Type":"ContainerDied","Data":"82e71421c176c08b3e6841f91a9158dd566d144087cb682a9be4637380c9b1b3"} Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.783128 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rdmzw"] Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.784374 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.786164 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.789356 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rdmzw"] Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.827698 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e85ab9d-cf8d-4814-939c-c779c53bfa45-utilities\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.827778 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjmxb\" (UniqueName: \"kubernetes.io/projected/4e85ab9d-cf8d-4814-939c-c779c53bfa45-kube-api-access-jjmxb\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.827852 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e85ab9d-cf8d-4814-939c-c779c53bfa45-catalog-content\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.928741 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e85ab9d-cf8d-4814-939c-c779c53bfa45-utilities\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.928811 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-jjmxb\" (UniqueName: \"kubernetes.io/projected/4e85ab9d-cf8d-4814-939c-c779c53bfa45-kube-api-access-jjmxb\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.928858 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e85ab9d-cf8d-4814-939c-c779c53bfa45-catalog-content\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.929418 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e85ab9d-cf8d-4814-939c-c779c53bfa45-utilities\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.929656 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e85ab9d-cf8d-4814-939c-c779c53bfa45-catalog-content\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.949712 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjmxb\" (UniqueName: \"kubernetes.io/projected/4e85ab9d-cf8d-4814-939c-c779c53bfa45-kube-api-access-jjmxb\") pod \"certified-operators-rdmzw\" (UID: \"4e85ab9d-cf8d-4814-939c-c779c53bfa45\") " pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.992016 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7gnm7"] Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.993540 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.995151 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7gnm7"] Oct 01 13:49:59 crc kubenswrapper[4605]: I1001 13:49:59.995904 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.029768 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46973963-c69a-460a-a5c3-3711005e4e00-utilities\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.030196 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46973963-c69a-460a-a5c3-3711005e4e00-catalog-content\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.030309 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-752wt\" (UniqueName: \"kubernetes.io/projected/46973963-c69a-460a-a5c3-3711005e4e00-kube-api-access-752wt\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.121603 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.133799 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46973963-c69a-460a-a5c3-3711005e4e00-utilities\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.133869 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46973963-c69a-460a-a5c3-3711005e4e00-catalog-content\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.133904 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-752wt\" (UniqueName: \"kubernetes.io/projected/46973963-c69a-460a-a5c3-3711005e4e00-kube-api-access-752wt\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.134730 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46973963-c69a-460a-a5c3-3711005e4e00-catalog-content\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.142260 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46973963-c69a-460a-a5c3-3711005e4e00-utilities\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.151115 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-752wt\" (UniqueName: \"kubernetes.io/projected/46973963-c69a-460a-a5c3-3711005e4e00-kube-api-access-752wt\") pod \"community-operators-7gnm7\" (UID: \"46973963-c69a-460a-a5c3-3711005e4e00\") " pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.311823 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rdmzw"] Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.320067 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:00 crc kubenswrapper[4605]: I1001 13:50:00.797021 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7gnm7"] Oct 01 13:50:00 crc kubenswrapper[4605]: W1001 13:50:00.824674 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46973963_c69a_460a_a5c3_3711005e4e00.slice/crio-e82cdcfbbc16bdb0e6520fe7860635cb62fb41f85423a12c92d606c3b3dfea6a WatchSource:0}: Error finding container e82cdcfbbc16bdb0e6520fe7860635cb62fb41f85423a12c92d606c3b3dfea6a: Status 404 returned error can't find the container with id e82cdcfbbc16bdb0e6520fe7860635cb62fb41f85423a12c92d606c3b3dfea6a Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.217689 4605 generic.go:334] "Generic (PLEG): container finished" podID="6a5db63d-46d4-4967-b16e-5ee3222617d0" containerID="217fe7ae3818908179075ad77b95006ffb0635c78b68832a23f3df641d0e4751" exitCode=0 Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.217735 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgctj" event={"ID":"6a5db63d-46d4-4967-b16e-5ee3222617d0","Type":"ContainerDied","Data":"217fe7ae3818908179075ad77b95006ffb0635c78b68832a23f3df641d0e4751"} Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.222820 4605 generic.go:334] "Generic (PLEG): container finished" podID="46973963-c69a-460a-a5c3-3711005e4e00" containerID="caa092ed04dada9777337a22f2a9a1ea94a36cac2d154cda0a173170bb7e2437" exitCode=0 Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.222924 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7gnm7" event={"ID":"46973963-c69a-460a-a5c3-3711005e4e00","Type":"ContainerDied","Data":"caa092ed04dada9777337a22f2a9a1ea94a36cac2d154cda0a173170bb7e2437"} Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.222985 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7gnm7" event={"ID":"46973963-c69a-460a-a5c3-3711005e4e00","Type":"ContainerStarted","Data":"e82cdcfbbc16bdb0e6520fe7860635cb62fb41f85423a12c92d606c3b3dfea6a"} Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.227824 4605 generic.go:334] "Generic (PLEG): container finished" podID="d5682155-2c45-4654-b77a-75760c61c945" containerID="c7787aaf7d14f72c5b0a71dae1d1ecc32d9aa9c6644dffe3dd8386546023965a" exitCode=0 Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.228059 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw6xk" event={"ID":"d5682155-2c45-4654-b77a-75760c61c945","Type":"ContainerDied","Data":"c7787aaf7d14f72c5b0a71dae1d1ecc32d9aa9c6644dffe3dd8386546023965a"} Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.232357 4605 generic.go:334] "Generic (PLEG): container finished" podID="4e85ab9d-cf8d-4814-939c-c779c53bfa45" containerID="f5f002786c3e437b0ca33f8f52cd5800d4af37154d01c1a9d54cd704194067ef" exitCode=0 Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.232392 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdmzw" event={"ID":"4e85ab9d-cf8d-4814-939c-c779c53bfa45","Type":"ContainerDied","Data":"f5f002786c3e437b0ca33f8f52cd5800d4af37154d01c1a9d54cd704194067ef"} Oct 01 13:50:01 crc kubenswrapper[4605]: I1001 13:50:01.232415 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-rdmzw" event={"ID":"4e85ab9d-cf8d-4814-939c-c779c53bfa45","Type":"ContainerStarted","Data":"b563c85c9075fef074ec274fe08593448400fc75c897bf2ab603ca49f1d45dd8"} Oct 01 13:50:02 crc kubenswrapper[4605]: I1001 13:50:02.240315 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fw6xk" event={"ID":"d5682155-2c45-4654-b77a-75760c61c945","Type":"ContainerStarted","Data":"c9c617bc576ba8c2e4df54bf25254830bcc3e807d6896798fe24ad884d58b07e"} Oct 01 13:50:02 crc kubenswrapper[4605]: I1001 13:50:02.242555 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgctj" event={"ID":"6a5db63d-46d4-4967-b16e-5ee3222617d0","Type":"ContainerStarted","Data":"df599533f4d1cfdfb4ef9c8881fd43c7bc30192290ed06b5a93ba15145ccd5fe"} Oct 01 13:50:02 crc kubenswrapper[4605]: I1001 13:50:02.263535 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fw6xk" podStartSLOduration=2.5133665179999998 podStartE2EDuration="5.263521108s" podCreationTimestamp="2025-10-01 13:49:57 +0000 UTC" firstStartedPulling="2025-10-01 13:49:59.188451647 +0000 UTC m=+321.932427855" lastFinishedPulling="2025-10-01 13:50:01.938606237 +0000 UTC m=+324.682582445" observedRunningTime="2025-10-01 13:50:02.256203121 +0000 UTC m=+325.000179329" watchObservedRunningTime="2025-10-01 13:50:02.263521108 +0000 UTC m=+325.007497316" Oct 01 13:50:02 crc kubenswrapper[4605]: I1001 13:50:02.272858 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xgctj" podStartSLOduration=2.707763246 podStartE2EDuration="5.272840026s" podCreationTimestamp="2025-10-01 13:49:57 +0000 UTC" firstStartedPulling="2025-10-01 13:49:59.194872993 +0000 UTC m=+321.938849201" lastFinishedPulling="2025-10-01 13:50:01.759949733 +0000 UTC m=+324.503925981" observedRunningTime="2025-10-01 13:50:02.270996239 +0000 UTC m=+325.014972447" watchObservedRunningTime="2025-10-01 13:50:02.272840026 +0000 UTC m=+325.016816234" Oct 01 13:50:03 crc kubenswrapper[4605]: I1001 13:50:03.285577 4605 generic.go:334] "Generic (PLEG): container finished" podID="4e85ab9d-cf8d-4814-939c-c779c53bfa45" containerID="1bf5fe45c2ebaa62c573e8267d46e19fef9d074b640c63bb8dab3c9f88aab20f" exitCode=0 Oct 01 13:50:03 crc kubenswrapper[4605]: I1001 13:50:03.286257 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdmzw" event={"ID":"4e85ab9d-cf8d-4814-939c-c779c53bfa45","Type":"ContainerDied","Data":"1bf5fe45c2ebaa62c573e8267d46e19fef9d074b640c63bb8dab3c9f88aab20f"} Oct 01 13:50:03 crc kubenswrapper[4605]: I1001 13:50:03.289498 4605 generic.go:334] "Generic (PLEG): container finished" podID="46973963-c69a-460a-a5c3-3711005e4e00" containerID="0d243d7ed2f1c0e876c4aae54fbe239db7edb74a643e0e1ee2a1275698aac660" exitCode=0 Oct 01 13:50:03 crc kubenswrapper[4605]: I1001 13:50:03.290387 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7gnm7" event={"ID":"46973963-c69a-460a-a5c3-3711005e4e00","Type":"ContainerDied","Data":"0d243d7ed2f1c0e876c4aae54fbe239db7edb74a643e0e1ee2a1275698aac660"} Oct 01 13:50:05 crc kubenswrapper[4605]: I1001 13:50:05.303426 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7gnm7" 
event={"ID":"46973963-c69a-460a-a5c3-3711005e4e00","Type":"ContainerStarted","Data":"03a574246ef097bc2be5f64909cfc08a6085eb404702ce11ccb7c149687712eb"} Oct 01 13:50:05 crc kubenswrapper[4605]: I1001 13:50:05.307218 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdmzw" event={"ID":"4e85ab9d-cf8d-4814-939c-c779c53bfa45","Type":"ContainerStarted","Data":"65a782b66606e7581e8d31bda79f80fd7b2f09f6e138ebab3eae0d3a0a8879c3"} Oct 01 13:50:05 crc kubenswrapper[4605]: I1001 13:50:05.323707 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7gnm7" podStartSLOduration=3.534545613 podStartE2EDuration="6.323687556s" podCreationTimestamp="2025-10-01 13:49:59 +0000 UTC" firstStartedPulling="2025-10-01 13:50:01.224224918 +0000 UTC m=+323.968201126" lastFinishedPulling="2025-10-01 13:50:04.013366861 +0000 UTC m=+326.757343069" observedRunningTime="2025-10-01 13:50:05.320431723 +0000 UTC m=+328.064407941" watchObservedRunningTime="2025-10-01 13:50:05.323687556 +0000 UTC m=+328.067663764" Oct 01 13:50:05 crc kubenswrapper[4605]: I1001 13:50:05.340691 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rdmzw" podStartSLOduration=3.636483372 podStartE2EDuration="6.34067274s" podCreationTimestamp="2025-10-01 13:49:59 +0000 UTC" firstStartedPulling="2025-10-01 13:50:01.235642394 +0000 UTC m=+323.979618602" lastFinishedPulling="2025-10-01 13:50:03.939831762 +0000 UTC m=+326.683807970" observedRunningTime="2025-10-01 13:50:05.33949994 +0000 UTC m=+328.083476158" watchObservedRunningTime="2025-10-01 13:50:05.34067274 +0000 UTC m=+328.084648948" Oct 01 13:50:07 crc kubenswrapper[4605]: I1001 13:50:07.698535 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xgctj" Oct 01 13:50:07 crc kubenswrapper[4605]: I1001 13:50:07.698888 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xgctj" Oct 01 13:50:07 crc kubenswrapper[4605]: I1001 13:50:07.737080 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xgctj" Oct 01 13:50:07 crc kubenswrapper[4605]: I1001 13:50:07.922988 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fw6xk" Oct 01 13:50:07 crc kubenswrapper[4605]: I1001 13:50:07.923051 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fw6xk" Oct 01 13:50:07 crc kubenswrapper[4605]: I1001 13:50:07.965308 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fw6xk" Oct 01 13:50:08 crc kubenswrapper[4605]: I1001 13:50:08.357742 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xgctj" Oct 01 13:50:08 crc kubenswrapper[4605]: I1001 13:50:08.358746 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fw6xk" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.122358 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.122903 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.164240 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.320693 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.320752 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.361740 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.388586 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rdmzw" Oct 01 13:50:10 crc kubenswrapper[4605]: I1001 13:50:10.404403 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7gnm7" Oct 01 13:50:51 crc kubenswrapper[4605]: I1001 13:50:51.632037 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:50:51 crc kubenswrapper[4605]: I1001 13:50:51.632676 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:51:21 crc kubenswrapper[4605]: I1001 13:51:21.631246 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:51:21 crc kubenswrapper[4605]: I1001 13:51:21.631913 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.337271 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8jjl4"] Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.338708 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.353416 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8jjl4"] Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485192 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-registry-certificates\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485244 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-registry-tls\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485312 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-trusted-ca\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485377 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485403 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkgbd\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-kube-api-access-dkgbd\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485423 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485611 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.485646 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-bound-sa-token\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.510138 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.586697 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkgbd\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-kube-api-access-dkgbd\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.586893 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.586992 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.587068 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-bound-sa-token\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.587153 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-registry-certificates\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.587219 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-registry-tls\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.587295 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-trusted-ca\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.587667 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.588818 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-trusted-ca\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.589018 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-registry-certificates\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.602012 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.602161 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-registry-tls\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.609166 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkgbd\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-kube-api-access-dkgbd\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.617047 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/912b9879-0ee0-4b7a-82b1-3a458b9f4a60-bound-sa-token\") pod \"image-registry-66df7c8f76-8jjl4\" (UID: \"912b9879-0ee0-4b7a-82b1-3a458b9f4a60\") " pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.654693 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.837122 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8jjl4"] Oct 01 13:51:45 crc kubenswrapper[4605]: I1001 13:51:45.885656 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" event={"ID":"912b9879-0ee0-4b7a-82b1-3a458b9f4a60","Type":"ContainerStarted","Data":"8c72c2b399fede8b97ed6cdb311c66a401ccdf7c0fc9505d089942621179d3c4"} Oct 01 13:51:46 crc kubenswrapper[4605]: I1001 13:51:46.891513 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" event={"ID":"912b9879-0ee0-4b7a-82b1-3a458b9f4a60","Type":"ContainerStarted","Data":"8022f7fc8b9db0b9732ef142f59c27c23ab3d9806931b4589242168b58b19d6f"} Oct 01 13:51:46 crc kubenswrapper[4605]: I1001 13:51:46.891874 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:51:46 crc kubenswrapper[4605]: I1001 13:51:46.915890 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" podStartSLOduration=1.915869529 podStartE2EDuration="1.915869529s" podCreationTimestamp="2025-10-01 13:51:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:51:46.908800824 +0000 UTC m=+429.652777032" watchObservedRunningTime="2025-10-01 13:51:46.915869529 +0000 UTC m=+429.659845737" Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.631077 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.631523 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.631584 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.632246 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79801dc6bc063ddb6a797ea140823ed637f359b821d68c718f59b852dd6781bd"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.632311 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://79801dc6bc063ddb6a797ea140823ed637f359b821d68c718f59b852dd6781bd" gracePeriod=600 Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 
13:51:51.916127 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="79801dc6bc063ddb6a797ea140823ed637f359b821d68c718f59b852dd6781bd" exitCode=0 Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.916172 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"79801dc6bc063ddb6a797ea140823ed637f359b821d68c718f59b852dd6781bd"} Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.916198 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"ab2e158b8dbbad131cf42220449cc89d2e9b1e83ce456c3ebd1aa3f78648df9e"} Oct 01 13:51:51 crc kubenswrapper[4605]: I1001 13:51:51.916217 4605 scope.go:117] "RemoveContainer" containerID="ae5b8e3f4bd159c632b04545707c7140ba6fcee21a3a3847d5e7f2b9e41b9178" Oct 01 13:52:05 crc kubenswrapper[4605]: I1001 13:52:05.664404 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-8jjl4" Oct 01 13:52:05 crc kubenswrapper[4605]: I1001 13:52:05.728559 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ncbbt"] Oct 01 13:52:30 crc kubenswrapper[4605]: I1001 13:52:30.771690 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" podUID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" containerName="registry" containerID="cri-o://7c9529b403cdb1fb229b4da78c3369686e8b204613ebe3ce0f79128c6b740297" gracePeriod=30 Oct 01 13:52:30 crc kubenswrapper[4605]: I1001 13:52:30.849340 4605 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-ncbbt container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.33:5000/healthz\": dial tcp 10.217.0.33:5000: connect: connection refused" start-of-body= Oct 01 13:52:30 crc kubenswrapper[4605]: I1001 13:52:30.849841 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" podUID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.33:5000/healthz\": dial tcp 10.217.0.33:5000: connect: connection refused" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.134429 4605 generic.go:334] "Generic (PLEG): container finished" podID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" containerID="7c9529b403cdb1fb229b4da78c3369686e8b204613ebe3ce0f79128c6b740297" exitCode=0 Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.134471 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" event={"ID":"20fe9925-9f6b-4b69-a13d-e8ff88daaec6","Type":"ContainerDied","Data":"7c9529b403cdb1fb229b4da78c3369686e8b204613ebe3ce0f79128c6b740297"} Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.203123 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.278906 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-bound-sa-token\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.278999 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw4zp\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-kube-api-access-dw4zp\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.279049 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-ca-trust-extracted\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.279150 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-tls\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.279206 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-installation-pull-secrets\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.279239 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-trusted-ca\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.279393 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.279469 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-certificates\") pod \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\" (UID: \"20fe9925-9f6b-4b69-a13d-e8ff88daaec6\") " Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.280139 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.280847 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.281627 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.281648 4605 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.286704 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.287651 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.287970 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-kube-api-access-dw4zp" (OuterVolumeSpecName: "kube-api-access-dw4zp") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "kube-api-access-dw4zp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.291753 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.291966 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.316396 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "20fe9925-9f6b-4b69-a13d-e8ff88daaec6" (UID: "20fe9925-9f6b-4b69-a13d-e8ff88daaec6"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.383289 4605 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.383326 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw4zp\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-kube-api-access-dw4zp\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.383337 4605 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.383346 4605 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:31 crc kubenswrapper[4605]: I1001 13:52:31.383356 4605 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20fe9925-9f6b-4b69-a13d-e8ff88daaec6-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 13:52:32 crc kubenswrapper[4605]: I1001 13:52:32.143411 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" event={"ID":"20fe9925-9f6b-4b69-a13d-e8ff88daaec6","Type":"ContainerDied","Data":"b2cf20413417acb64c83ee50e4dcb9325ca820cae7e2b5bcb9c30fb3bfefad6e"} Oct 01 13:52:32 crc kubenswrapper[4605]: I1001 13:52:32.143477 4605 scope.go:117] "RemoveContainer" containerID="7c9529b403cdb1fb229b4da78c3369686e8b204613ebe3ce0f79128c6b740297" Oct 01 13:52:32 crc kubenswrapper[4605]: I1001 13:52:32.143509 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ncbbt" Oct 01 13:52:32 crc kubenswrapper[4605]: I1001 13:52:32.162742 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ncbbt"] Oct 01 13:52:32 crc kubenswrapper[4605]: I1001 13:52:32.167732 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ncbbt"] Oct 01 13:52:33 crc kubenswrapper[4605]: I1001 13:52:33.937536 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" path="/var/lib/kubelet/pods/20fe9925-9f6b-4b69-a13d-e8ff88daaec6/volumes" Oct 01 13:53:51 crc kubenswrapper[4605]: I1001 13:53:51.631472 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:53:51 crc kubenswrapper[4605]: I1001 13:53:51.632049 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:54:21 crc kubenswrapper[4605]: I1001 13:54:21.631384 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:54:21 crc kubenswrapper[4605]: I1001 13:54:21.631948 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.631288 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.631970 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.632071 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.632993 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab2e158b8dbbad131cf42220449cc89d2e9b1e83ce456c3ebd1aa3f78648df9e"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.633155 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://ab2e158b8dbbad131cf42220449cc89d2e9b1e83ce456c3ebd1aa3f78648df9e" gracePeriod=600 Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.908199 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="ab2e158b8dbbad131cf42220449cc89d2e9b1e83ce456c3ebd1aa3f78648df9e" exitCode=0 Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.908260 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"ab2e158b8dbbad131cf42220449cc89d2e9b1e83ce456c3ebd1aa3f78648df9e"} Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.908555 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"57b43180ba9a3ef7b3e3cb1260253e07ed74787366be1e64f3a3708a8ee8ce49"} Oct 01 13:54:51 crc kubenswrapper[4605]: I1001 13:54:51.908577 4605 scope.go:117] "RemoveContainer" containerID="79801dc6bc063ddb6a797ea140823ed637f359b821d68c718f59b852dd6781bd" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.874705 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-7cs6w"] Oct 01 13:55:21 crc kubenswrapper[4605]: E1001 13:55:21.875579 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" containerName="registry" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.875715 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" containerName="registry" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.876203 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fe9925-9f6b-4b69-a13d-e8ff88daaec6" containerName="registry" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.876652 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.878622 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.879179 4605 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-62jwc" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.879317 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.881854 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-rt7m9"] Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.882639 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-rt7m9" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.888215 4605 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-dzf2g" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.891865 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-7cs6w"] Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.901001 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-rt7m9"] Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.937633 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-478v7"] Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.938576 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.945511 4605 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-2gftp" Oct 01 13:55:21 crc kubenswrapper[4605]: I1001 13:55:21.969210 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-478v7"] Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.063001 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prdt9\" (UniqueName: \"kubernetes.io/projected/0fccd814-2572-49cb-b325-549214e05fc2-kube-api-access-prdt9\") pod \"cert-manager-webhook-5655c58dd6-478v7\" (UID: \"0fccd814-2572-49cb-b325-549214e05fc2\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.063065 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbjjd\" (UniqueName: \"kubernetes.io/projected/ce49c1e2-5b54-442f-9c7c-5242886a218a-kube-api-access-jbjjd\") pod \"cert-manager-5b446d88c5-rt7m9\" (UID: \"ce49c1e2-5b54-442f-9c7c-5242886a218a\") " pod="cert-manager/cert-manager-5b446d88c5-rt7m9" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.063118 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwn82\" (UniqueName: \"kubernetes.io/projected/365338db-6cc8-4281-a9a8-665a9c64a850-kube-api-access-mwn82\") pod \"cert-manager-cainjector-7f985d654d-7cs6w\" (UID: \"365338db-6cc8-4281-a9a8-665a9c64a850\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.164604 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbjjd\" (UniqueName: \"kubernetes.io/projected/ce49c1e2-5b54-442f-9c7c-5242886a218a-kube-api-access-jbjjd\") pod \"cert-manager-5b446d88c5-rt7m9\" (UID: \"ce49c1e2-5b54-442f-9c7c-5242886a218a\") " pod="cert-manager/cert-manager-5b446d88c5-rt7m9" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.164693 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwn82\" (UniqueName: \"kubernetes.io/projected/365338db-6cc8-4281-a9a8-665a9c64a850-kube-api-access-mwn82\") pod \"cert-manager-cainjector-7f985d654d-7cs6w\" (UID: \"365338db-6cc8-4281-a9a8-665a9c64a850\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.164740 4605 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prdt9\" (UniqueName: \"kubernetes.io/projected/0fccd814-2572-49cb-b325-549214e05fc2-kube-api-access-prdt9\") pod \"cert-manager-webhook-5655c58dd6-478v7\" (UID: \"0fccd814-2572-49cb-b325-549214e05fc2\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.185881 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwn82\" (UniqueName: \"kubernetes.io/projected/365338db-6cc8-4281-a9a8-665a9c64a850-kube-api-access-mwn82\") pod \"cert-manager-cainjector-7f985d654d-7cs6w\" (UID: \"365338db-6cc8-4281-a9a8-665a9c64a850\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.186048 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbjjd\" (UniqueName: \"kubernetes.io/projected/ce49c1e2-5b54-442f-9c7c-5242886a218a-kube-api-access-jbjjd\") pod \"cert-manager-5b446d88c5-rt7m9\" (UID: \"ce49c1e2-5b54-442f-9c7c-5242886a218a\") " pod="cert-manager/cert-manager-5b446d88c5-rt7m9" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.188055 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prdt9\" (UniqueName: \"kubernetes.io/projected/0fccd814-2572-49cb-b325-549214e05fc2-kube-api-access-prdt9\") pod \"cert-manager-webhook-5655c58dd6-478v7\" (UID: \"0fccd814-2572-49cb-b325-549214e05fc2\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.198807 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.207300 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-rt7m9" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.258826 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.401973 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-rt7m9"] Oct 01 13:55:22 crc kubenswrapper[4605]: W1001 13:55:22.416829 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce49c1e2_5b54_442f_9c7c_5242886a218a.slice/crio-1cbce5156348d35a5e4c15e7cd2a0405e8465e5b21a7928902941debbac60329 WatchSource:0}: Error finding container 1cbce5156348d35a5e4c15e7cd2a0405e8465e5b21a7928902941debbac60329: Status 404 returned error can't find the container with id 1cbce5156348d35a5e4c15e7cd2a0405e8465e5b21a7928902941debbac60329 Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.419629 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.459693 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-7cs6w"] Oct 01 13:55:22 crc kubenswrapper[4605]: W1001 13:55:22.466382 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod365338db_6cc8_4281_a9a8_665a9c64a850.slice/crio-e788504685401db8f724391a32b22344dbc58389c65b665f21a1e396231e992c WatchSource:0}: Error finding container e788504685401db8f724391a32b22344dbc58389c65b665f21a1e396231e992c: Status 404 returned error can't find the container with id e788504685401db8f724391a32b22344dbc58389c65b665f21a1e396231e992c Oct 01 13:55:22 crc kubenswrapper[4605]: I1001 13:55:22.510697 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-478v7"] Oct 01 13:55:23 crc kubenswrapper[4605]: I1001 13:55:23.064364 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" event={"ID":"0fccd814-2572-49cb-b325-549214e05fc2","Type":"ContainerStarted","Data":"dddfe5de4778fc212bca04f1e021650191b6b16aa3362894a58803082d5f01f5"} Oct 01 13:55:23 crc kubenswrapper[4605]: I1001 13:55:23.067662 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" event={"ID":"365338db-6cc8-4281-a9a8-665a9c64a850","Type":"ContainerStarted","Data":"e788504685401db8f724391a32b22344dbc58389c65b665f21a1e396231e992c"} Oct 01 13:55:23 crc kubenswrapper[4605]: I1001 13:55:23.068936 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-rt7m9" event={"ID":"ce49c1e2-5b54-442f-9c7c-5242886a218a","Type":"ContainerStarted","Data":"1cbce5156348d35a5e4c15e7cd2a0405e8465e5b21a7928902941debbac60329"} Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.085570 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-rt7m9" event={"ID":"ce49c1e2-5b54-442f-9c7c-5242886a218a","Type":"ContainerStarted","Data":"c2ade8053e6f2f095e1a11e16492359b5040a41f3d1de81700bb8e0fcc5f3108"} Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.086768 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" event={"ID":"0fccd814-2572-49cb-b325-549214e05fc2","Type":"ContainerStarted","Data":"f068ad2c119d72cc51202bf3c93276539f5764d2a7f6d4cf20d52feed33be960"} Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.086896 4605 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.088733 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" event={"ID":"365338db-6cc8-4281-a9a8-665a9c64a850","Type":"ContainerStarted","Data":"6556e31400bb78797304f58994ee233a46da16f80bc45f63b1582c8b0bb03c41"} Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.101079 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-rt7m9" podStartSLOduration=2.123013588 podStartE2EDuration="5.101057466s" podCreationTimestamp="2025-10-01 13:55:21 +0000 UTC" firstStartedPulling="2025-10-01 13:55:22.419377255 +0000 UTC m=+645.163353463" lastFinishedPulling="2025-10-01 13:55:25.397421143 +0000 UTC m=+648.141397341" observedRunningTime="2025-10-01 13:55:26.099349563 +0000 UTC m=+648.843325771" watchObservedRunningTime="2025-10-01 13:55:26.101057466 +0000 UTC m=+648.845033674" Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.150535 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-7cs6w" podStartSLOduration=2.168523145 podStartE2EDuration="5.150521213s" podCreationTimestamp="2025-10-01 13:55:21 +0000 UTC" firstStartedPulling="2025-10-01 13:55:22.471189792 +0000 UTC m=+645.215166000" lastFinishedPulling="2025-10-01 13:55:25.45318786 +0000 UTC m=+648.197164068" observedRunningTime="2025-10-01 13:55:26.148017049 +0000 UTC m=+648.891993267" watchObservedRunningTime="2025-10-01 13:55:26.150521213 +0000 UTC m=+648.894497411" Oct 01 13:55:26 crc kubenswrapper[4605]: I1001 13:55:26.167911 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" podStartSLOduration=2.283928585 podStartE2EDuration="5.167893644s" podCreationTimestamp="2025-10-01 13:55:21 +0000 UTC" firstStartedPulling="2025-10-01 13:55:22.51842467 +0000 UTC m=+645.262400878" lastFinishedPulling="2025-10-01 13:55:25.402389729 +0000 UTC m=+648.146365937" observedRunningTime="2025-10-01 13:55:26.1653735 +0000 UTC m=+648.909349708" watchObservedRunningTime="2025-10-01 13:55:26.167893644 +0000 UTC m=+648.911869862" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.262166 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-478v7" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.438904 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kzv4p"] Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439749 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-controller" containerID="cri-o://c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439846 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439916 4605 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="nbdb" containerID="cri-o://8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439889 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="northd" containerID="cri-o://ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439862 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-acl-logging" containerID="cri-o://98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439846 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-node" containerID="cri-o://56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.439782 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="sbdb" containerID="cri-o://2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.489451 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" containerID="cri-o://250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4" gracePeriod=30 Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.760614 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/3.log" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.762513 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovn-acl-logging/0.log" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.763034 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovn-controller/0.log" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.763442 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824086 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovn-node-metrics-cert\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824137 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824168 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-config\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824184 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-netd\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824204 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-node-log\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824222 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-var-lib-openvswitch\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824244 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-kubelet\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824259 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824259 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824277 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-slash\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824329 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-slash" (OuterVolumeSpecName: "host-slash") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824344 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-systemd\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824363 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-node-log" (OuterVolumeSpecName: "node-log") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824381 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824401 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824417 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-systemd-units\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824492 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-netns\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824508 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-bin\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824552 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824582 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824621 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-log-socket\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824651 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-env-overrides\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824664 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-log-socket" (OuterVolumeSpecName: "log-socket") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824703 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-etc-openvswitch\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824721 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmn8q\" (UniqueName: \"kubernetes.io/projected/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-kube-api-access-kmn8q\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824711 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824737 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-script-lib\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824813 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-ovn\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824858 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-openvswitch\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824893 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-ovn-kubernetes\") pod \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\" (UID: \"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae\") " Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824768 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.824897 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825187 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825212 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825312 4605 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825350 4605 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825360 4605 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-log-socket\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825369 4605 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825377 4605 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825385 4605 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825394 4605 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825403 4605 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825436 4605 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825763 4605 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-node-log\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825797 4605 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825247 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825824 4605 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825336 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825491 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825834 4605 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-slash\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.825876 4605 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.829937 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.830361 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-kube-api-access-kmn8q" (OuterVolumeSpecName: "kube-api-access-kmn8q") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "kube-api-access-kmn8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831420 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rtgh8"] Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831593 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831610 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831617 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831624 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831633 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831638 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831647 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="sbdb" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831652 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="sbdb" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831661 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kubecfg-setup" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831666 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kubecfg-setup" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831675 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-acl-logging" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831681 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-acl-logging" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831688 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-node" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831693 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-node" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831701 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831706 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831714 4605 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="northd" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831721 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="northd" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831728 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="nbdb" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831734 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="nbdb" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831743 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831748 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831825 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831835 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831841 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831849 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831856 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="sbdb" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831863 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831872 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="kube-rbac-proxy-node" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831879 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="northd" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831886 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831893 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovn-acl-logging" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831903 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="nbdb" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831979 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831987 4605 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: E1001 13:55:32.831994 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.831999 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.832080 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerName="ovnkube-controller" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.833487 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.838638 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" (UID: "e0b90c02-c41c-4f5b-ae0a-c6444435a3ae"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926621 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-kubelet\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926684 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-run-netns\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926709 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-cni-bin\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926736 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovnkube-script-lib\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926768 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-node-log\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926817 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovnkube-config\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926841 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926861 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-log-socket\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926894 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926925 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-var-lib-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.926952 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-slash\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927076 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovn-node-metrics-cert\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927146 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-etc-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927165 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-run-ovn-kubernetes\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: 
I1001 13:55:32.927202 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-systemd\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927335 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-ovn\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927397 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-env-overrides\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927428 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc8cn\" (UniqueName: \"kubernetes.io/projected/33464318-eae4-4013-8a6d-3bbaaf2904ef-kube-api-access-hc8cn\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927490 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-systemd-units\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927527 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-cni-netd\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927638 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmn8q\" (UniqueName: \"kubernetes.io/projected/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-kube-api-access-kmn8q\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927652 4605 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927665 4605 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927677 4605 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927690 4605 
reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:32 crc kubenswrapper[4605]: I1001 13:55:32.927702 4605 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028732 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovn-node-metrics-cert\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028804 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-run-ovn-kubernetes\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028840 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-etc-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028876 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-systemd\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028916 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-ovn\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028933 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-run-ovn-kubernetes\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.028971 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-env-overrides\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029027 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc8cn\" (UniqueName: \"kubernetes.io/projected/33464318-eae4-4013-8a6d-3bbaaf2904ef-kube-api-access-hc8cn\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029125 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-systemd-units\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029149 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-cni-netd\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029187 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-kubelet\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029209 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-run-netns\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029244 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-cni-bin\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029286 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovnkube-script-lib\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029312 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-node-log\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029369 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovnkube-config\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029374 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-cni-netd\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029391 4605 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029452 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-systemd\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029468 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-cni-bin\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029518 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-ovn\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029490 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-etc-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029556 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-kubelet\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029633 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-run-netns\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029685 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-node-log\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029784 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-systemd-units\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029850 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-run-openvswitch\") pod 
\"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.029875 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-env-overrides\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.030806 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-log-socket\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.030829 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovnkube-config\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.030893 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-log-socket\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.031065 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.031107 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.031274 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-var-lib-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.031299 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-slash\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.031449 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-host-slash\") pod \"ovnkube-node-rtgh8\" (UID: 
\"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.031493 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/33464318-eae4-4013-8a6d-3bbaaf2904ef-var-lib-openvswitch\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.032569 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovn-node-metrics-cert\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.032624 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/33464318-eae4-4013-8a6d-3bbaaf2904ef-ovnkube-script-lib\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.050190 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc8cn\" (UniqueName: \"kubernetes.io/projected/33464318-eae4-4013-8a6d-3bbaaf2904ef-kube-api-access-hc8cn\") pod \"ovnkube-node-rtgh8\" (UID: \"33464318-eae4-4013-8a6d-3bbaaf2904ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.127823 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/2.log" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.128454 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/1.log" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.128568 4605 generic.go:334] "Generic (PLEG): container finished" podID="1c2ca71f-4cb0-4852-927d-af69be5d77f2" containerID="ecb12288b76f5a9b7386a594a3210e745efc153651dcd926e6d3a04db0c2a2ee" exitCode=2 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.128627 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerDied","Data":"ecb12288b76f5a9b7386a594a3210e745efc153651dcd926e6d3a04db0c2a2ee"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.128759 4605 scope.go:117] "RemoveContainer" containerID="2c2a3ea80efc4b22dd64e7627c18212081f51882aa7616ba8bfa3f2b116f4bf9" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.129202 4605 scope.go:117] "RemoveContainer" containerID="ecb12288b76f5a9b7386a594a3210e745efc153651dcd926e6d3a04db0c2a2ee" Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.129471 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wgx5p_openshift-multus(1c2ca71f-4cb0-4852-927d-af69be5d77f2)\"" pod="openshift-multus/multus-wgx5p" podUID="1c2ca71f-4cb0-4852-927d-af69be5d77f2" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.133339 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovnkube-controller/3.log" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.134964 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovn-acl-logging/0.log" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135465 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-kzv4p_e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/ovn-controller/0.log" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135793 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4" exitCode=0 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135814 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa" exitCode=0 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135821 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925" exitCode=0 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135845 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a" exitCode=0 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135851 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736" exitCode=0 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135858 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d" exitCode=0 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135865 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400" exitCode=143 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135872 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b" exitCode=143 Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135892 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135938 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135953 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" 
event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135964 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.135974 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136004 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136014 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136023 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136028 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136033 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136038 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136043 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136048 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136052 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136057 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136076 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136084 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136116 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136123 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136129 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136135 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136141 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136148 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136154 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136160 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136166 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136488 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136503 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136516 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136522 4605 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136528 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136533 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136538 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136543 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136565 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136570 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136578 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136583 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136589 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" event={"ID":"e0b90c02-c41c-4f5b-ae0a-c6444435a3ae","Type":"ContainerDied","Data":"7a49db42d9607f1121da89e018d193ad2e829f595e9c5a64ccf244533ed09162"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136598 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136603 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136608 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136613 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136618 4605 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136623 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136644 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136649 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136654 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136658 4605 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.136759 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kzv4p" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.151550 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.173633 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kzv4p"] Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.177617 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kzv4p"] Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.182168 4605 scope.go:117] "RemoveContainer" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.200064 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.217182 4605 scope.go:117] "RemoveContainer" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.239695 4605 scope.go:117] "RemoveContainer" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.254027 4605 scope.go:117] "RemoveContainer" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.268224 4605 scope.go:117] "RemoveContainer" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.281448 4605 scope.go:117] "RemoveContainer" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.296328 4605 scope.go:117] "RemoveContainer" 
containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.313369 4605 scope.go:117] "RemoveContainer" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.357315 4605 scope.go:117] "RemoveContainer" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.369900 4605 scope.go:117] "RemoveContainer" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4" Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.370281 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": container with ID starting with 250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4 not found: ID does not exist" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.370313 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} err="failed to get container status \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": rpc error: code = NotFound desc = could not find container \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": container with ID starting with 250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4 not found: ID does not exist" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.370332 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.370671 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": container with ID starting with 0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4 not found: ID does not exist" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.370690 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} err="failed to get container status \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": rpc error: code = NotFound desc = could not find container \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": container with ID starting with 0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4 not found: ID does not exist" Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.370703 4605 scope.go:117] "RemoveContainer" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa" Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.371035 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": container with ID starting with 2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa not found: ID does not exist" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa" 
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371081 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} err="failed to get container status \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": rpc error: code = NotFound desc = could not find container \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": container with ID starting with 2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371147 4605 scope.go:117] "RemoveContainer" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.371411 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": container with ID starting with 8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925 not found: ID does not exist" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371431 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} err="failed to get container status \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": rpc error: code = NotFound desc = could not find container \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": container with ID starting with 8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371444 4605 scope.go:117] "RemoveContainer" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.371644 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": container with ID starting with ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a not found: ID does not exist" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371668 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} err="failed to get container status \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": rpc error: code = NotFound desc = could not find container \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": container with ID starting with ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371682 4605 scope.go:117] "RemoveContainer" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.371906 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": container with ID starting with 09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736 not found: ID does not exist" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371925 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} err="failed to get container status \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": rpc error: code = NotFound desc = could not find container \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": container with ID starting with 09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.371938 4605 scope.go:117] "RemoveContainer" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.373165 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": container with ID starting with 56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d not found: ID does not exist" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373198 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} err="failed to get container status \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": rpc error: code = NotFound desc = could not find container \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": container with ID starting with 56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373218 4605 scope.go:117] "RemoveContainer" containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.373398 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": container with ID starting with 98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400 not found: ID does not exist" containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373419 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} err="failed to get container status \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": rpc error: code = NotFound desc = could not find container \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": container with ID starting with 98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373436 4605 scope.go:117] "RemoveContainer" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.373600 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": container with ID starting with c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b not found: ID does not exist" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373618 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} err="failed to get container status \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": rpc error: code = NotFound desc = could not find container \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": container with ID starting with c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373630 4605 scope.go:117] "RemoveContainer" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"
Oct 01 13:55:33 crc kubenswrapper[4605]: E1001 13:55:33.373817 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": container with ID starting with a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6 not found: ID does not exist" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373836 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} err="failed to get container status \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": rpc error: code = NotFound desc = could not find container \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": container with ID starting with a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373847 4605 scope.go:117] "RemoveContainer" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.373986 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} err="failed to get container status \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": rpc error: code = NotFound desc = could not find container \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": container with ID starting with 250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374002 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374154 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} err="failed to get container status \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": rpc error: code = NotFound desc = could not find container \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": container with ID starting with 0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374170 4605 scope.go:117] "RemoveContainer" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374302 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} err="failed to get container status \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": rpc error: code = NotFound desc = could not find container \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": container with ID starting with 2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374320 4605 scope.go:117] "RemoveContainer" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374475 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} err="failed to get container status \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": rpc error: code = NotFound desc = could not find container \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": container with ID starting with 8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374498 4605 scope.go:117] "RemoveContainer" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374737 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} err="failed to get container status \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": rpc error: code = NotFound desc = could not find container \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": container with ID starting with ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374755 4605 scope.go:117] "RemoveContainer" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374890 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} err="failed to get container status \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": rpc error: code = NotFound desc = could not find container \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": container with ID starting with 09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.374905 4605 scope.go:117] "RemoveContainer" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.375190 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} err="failed to get container status \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": rpc error: code = NotFound desc = could not find container \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": container with ID starting with 56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.375210 4605 scope.go:117] "RemoveContainer" containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.375824 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} err="failed to get container status \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": rpc error: code = NotFound desc = could not find container \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": container with ID starting with 98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.375846 4605 scope.go:117] "RemoveContainer" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.378137 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} err="failed to get container status \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": rpc error: code = NotFound desc = could not find container \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": container with ID starting with c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.378155 4605 scope.go:117] "RemoveContainer" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.378512 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} err="failed to get container status \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": rpc error: code = NotFound desc = could not find container \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": container with ID starting with a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.378527 4605 scope.go:117] "RemoveContainer" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.378754 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} err="failed to get container status \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": rpc error: code = NotFound desc = could not find container \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": container with ID starting with 250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.378766 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.379249 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} err="failed to get container status \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": rpc error: code = NotFound desc = could not find container \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": container with ID starting with 0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.379271 4605 scope.go:117] "RemoveContainer" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.379742 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} err="failed to get container status \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": rpc error: code = NotFound desc = could not find container \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": container with ID starting with 2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.379766 4605 scope.go:117] "RemoveContainer" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.379999 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} err="failed to get container status \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": rpc error: code = NotFound desc = could not find container \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": container with ID starting with 8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380026 4605 scope.go:117] "RemoveContainer" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380382 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} err="failed to get container status \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": rpc error: code = NotFound desc = could not find container \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": container with ID starting with ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380416 4605 scope.go:117] "RemoveContainer" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380647 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} err="failed to get container status \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": rpc error: code = NotFound desc = could not find container \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": container with ID starting with 09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380660 4605 scope.go:117] "RemoveContainer" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380853 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} err="failed to get container status \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": rpc error: code = NotFound desc = could not find container \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": container with ID starting with 56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.380866 4605 scope.go:117] "RemoveContainer" containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.381052 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} err="failed to get container status \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": rpc error: code = NotFound desc = could not find container \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": container with ID starting with 98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.381064 4605 scope.go:117] "RemoveContainer" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.381300 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} err="failed to get container status \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": rpc error: code = NotFound desc = could not find container \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": container with ID starting with c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.381313 4605 scope.go:117] "RemoveContainer" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.382545 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} err="failed to get container status \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": rpc error: code = NotFound desc = could not find container \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": container with ID starting with a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.382565 4605 scope.go:117] "RemoveContainer" containerID="250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.385055 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4"} err="failed to get container status \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": rpc error: code = NotFound desc = could not find container \"250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4\": container with ID starting with 250086e7d7b8cdd7761b7001f3eca6b6ea1a3008cffc0778f2c1b6c092578ae4 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.385106 4605 scope.go:117] "RemoveContainer" containerID="0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.385551 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4"} err="failed to get container status \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": rpc error: code = NotFound desc = could not find container \"0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4\": container with ID starting with 0be438218d8ebb0fffe42a99bb838e7aa749b7749c1ca18fb294cecb4fe8b4e4 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.385575 4605 scope.go:117] "RemoveContainer" containerID="2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.387536 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa"} err="failed to get container status \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": rpc error: code = NotFound desc = could not find container \"2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa\": container with ID starting with 2d1219f1caadb9f0992fa56b158a23ace3616d178360c4690657deb351a3ccaa not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.387556 4605 scope.go:117] "RemoveContainer" containerID="8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.387810 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925"} err="failed to get container status \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": rpc error: code = NotFound desc = could not find container \"8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925\": container with ID starting with 8f6f39aa4d204b41067d2705ce828e79752ca4e5b7689821e34c98f5041ce925 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.387828 4605 scope.go:117] "RemoveContainer" containerID="ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.388439 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a"} err="failed to get container status \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": rpc error: code = NotFound desc = could not find container \"ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a\": container with ID starting with ddba08349bbb0f7040bb04f37d4b5a137de327a5c4a7a5689396a2a1925b062a not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.388464 4605 scope.go:117] "RemoveContainer" containerID="09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.388677 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736"} err="failed to get container status \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": rpc error: code = NotFound desc = could not find container \"09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736\": container with ID starting with 09ecc57bc8186c325ef0bd38c17882024dda3714b52f9b0295a75ed93b2d6736 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.388704 4605 scope.go:117] "RemoveContainer" containerID="56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.388985 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d"} err="failed to get container status \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": rpc error: code = NotFound desc = could not find container \"56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d\": container with ID starting with 56f2691df87d1c843a73c06c39c7ad1d5477fb1f10c6f3f797f658e7d0c0d04d not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.389004 4605 scope.go:117] "RemoveContainer" containerID="98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.389292 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400"} err="failed to get container status \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": rpc error: code = NotFound desc = could not find container \"98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400\": container with ID starting with 98143f416c776bf79ee24b42b70d88d1cbe4528bd7a1c0fc886a8f6e711c6400 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.389311 4605 scope.go:117] "RemoveContainer" containerID="c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.389562 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b"} err="failed to get container status \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": rpc error: code = NotFound desc = could not find container \"c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b\": container with ID starting with c8442849a4d37d618e701a2ea4c1dd33944894765c874fd5635c74d37a7f411b not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.389610 4605 scope.go:117] "RemoveContainer" containerID="a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.389806 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6"} err="failed to get container status \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": rpc error: code = NotFound desc = could not find container \"a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6\": container with ID starting with a5b0fcaa538db338634eb0ee99823d4a7bd40ed764bc544c95c114f49fcafdc6 not found: ID does not exist"
Oct 01 13:55:33 crc kubenswrapper[4605]: I1001 13:55:33.932300 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0b90c02-c41c-4f5b-ae0a-c6444435a3ae" path="/var/lib/kubelet/pods/e0b90c02-c41c-4f5b-ae0a-c6444435a3ae/volumes"
Oct 01 13:55:34 crc kubenswrapper[4605]: I1001 13:55:34.141662 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/2.log"
Oct 01 13:55:34 crc kubenswrapper[4605]: I1001 13:55:34.142879 4605 generic.go:334] "Generic (PLEG): container finished" podID="33464318-eae4-4013-8a6d-3bbaaf2904ef" containerID="547e50fd0f0df456f0c9e2d0e7bbebe0deb771226b55baf0d2cf0906c1a253ee" exitCode=0
Oct 01 13:55:34 crc kubenswrapper[4605]: I1001 13:55:34.142930 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerDied","Data":"547e50fd0f0df456f0c9e2d0e7bbebe0deb771226b55baf0d2cf0906c1a253ee"}
Oct 01 13:55:34 crc kubenswrapper[4605]: I1001 13:55:34.142995 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"a8a47d92b9848ecc49ad88cf207961504cb56dd45c46b2205bce8dc233577a41"}
Oct 01 13:55:35 crc kubenswrapper[4605]: I1001 13:55:35.151872 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"e72b178ca07ac156c0b144236e9e42a4ca9ce661591da0991b3c651b8eaf58f2"}
Oct 01 13:55:35 crc kubenswrapper[4605]: I1001 13:55:35.152227 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"ab497cf5feaade7d10010e25334571f51bb6ca00e743de96d4fe4c62439b8cde"}
Oct 01 13:55:35 crc kubenswrapper[4605]: I1001 13:55:35.152239 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"9433f3b3423c4b40af6bcdfc292db77a6b8c173d11434efd04bcc830af0d0b43"}
Oct 01 13:55:35 crc kubenswrapper[4605]: I1001 13:55:35.152247 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"14578ff433d4d4182d41edc66aa819cd4bbd6ce8106be26f61236eb02befbaa1"}
Oct 01 13:55:36 crc kubenswrapper[4605]: I1001 13:55:36.160643 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"437a89e21653ffb53b1e20290c82c0dce31c3c0f34fbf983990be46ae590d39e"}
Oct 01 13:55:36 crc kubenswrapper[4605]: I1001 13:55:36.160995 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"2aac76de30ed0f84f94ba0f24903469f94b06ef70445f3e09157741be0f50d7c"}
Oct 01 13:55:38 crc kubenswrapper[4605]: I1001 13:55:38.176672 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"49e0955247a60604b725579ca59b0ae6d25eda7c82a451edcedb24423047f8c9"}
Oct 01 13:55:40 crc kubenswrapper[4605]: I1001 13:55:40.192888 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" event={"ID":"33464318-eae4-4013-8a6d-3bbaaf2904ef","Type":"ContainerStarted","Data":"c5bec0d75facd8c5443a5a2a753fbe21b8cfe81a558c35b738c24be02d1c824d"}
Oct 01 13:55:40 crc kubenswrapper[4605]: I1001 13:55:40.193281 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8"
Oct 01 13:55:40 crc kubenswrapper[4605]: I1001 13:55:40.228471 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8"
Oct 01 13:55:40 crc kubenswrapper[4605]: I1001 13:55:40.239073 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" podStartSLOduration=8.239057767 podStartE2EDuration="8.239057767s" podCreationTimestamp="2025-10-01 13:55:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:55:40.238678718 +0000 UTC m=+662.982654926" watchObservedRunningTime="2025-10-01 13:55:40.239057767 +0000 UTC m=+662.983033975"
Oct 01 13:55:41 crc kubenswrapper[4605]: I1001 13:55:41.199367 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8"
Oct 01 13:55:41 crc kubenswrapper[4605]: I1001 13:55:41.199522 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8"
Oct 01 13:55:41 crc kubenswrapper[4605]: I1001 13:55:41.225618 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8"
Oct 01 13:55:45 crc kubenswrapper[4605]: I1001 13:55:45.927161 4605 scope.go:117] "RemoveContainer" containerID="ecb12288b76f5a9b7386a594a3210e745efc153651dcd926e6d3a04db0c2a2ee"
Oct 01 13:55:45 crc kubenswrapper[4605]: E1001 13:55:45.927793 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wgx5p_openshift-multus(1c2ca71f-4cb0-4852-927d-af69be5d77f2)\"" pod="openshift-multus/multus-wgx5p" podUID="1c2ca71f-4cb0-4852-927d-af69be5d77f2"
Oct 01 13:55:57 crc kubenswrapper[4605]: I1001 13:55:57.933806 4605 scope.go:117] "RemoveContainer" containerID="ecb12288b76f5a9b7386a594a3210e745efc153651dcd926e6d3a04db0c2a2ee"
Oct 01 13:55:58 crc kubenswrapper[4605]: I1001 13:55:58.285901 4605 log.go:25] "Finished parsing log file"
path="/var/log/pods/openshift-multus_multus-wgx5p_1c2ca71f-4cb0-4852-927d-af69be5d77f2/kube-multus/2.log" Oct 01 13:55:58 crc kubenswrapper[4605]: I1001 13:55:58.286318 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wgx5p" event={"ID":"1c2ca71f-4cb0-4852-927d-af69be5d77f2","Type":"ContainerStarted","Data":"86c74fca398da57aa37844e4ee44a6e637191ad1c165ec2e9efdf32f8733f3a4"} Oct 01 13:56:03 crc kubenswrapper[4605]: I1001 13:56:03.172922 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rtgh8" Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.942376 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j"] Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.944363 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.946318 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.988887 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j"] Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.999089 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.999307 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:10 crc kubenswrapper[4605]: I1001 13:56:10.999383 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzv6b\" (UniqueName: \"kubernetes.io/projected/8542ad4d-301a-4957-ab3f-1c305ad1ff43-kube-api-access-dzv6b\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.100885 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.100937 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzv6b\" (UniqueName: 
\"kubernetes.io/projected/8542ad4d-301a-4957-ab3f-1c305ad1ff43-kube-api-access-dzv6b\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.101016 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.101395 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.101464 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.119562 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzv6b\" (UniqueName: \"kubernetes.io/projected/8542ad4d-301a-4957-ab3f-1c305ad1ff43-kube-api-access-dzv6b\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.261940 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:11 crc kubenswrapper[4605]: I1001 13:56:11.488973 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j"] Oct 01 13:56:12 crc kubenswrapper[4605]: I1001 13:56:12.360028 4605 generic.go:334] "Generic (PLEG): container finished" podID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerID="d323a7b1807e6ee0ae6c6f648ea550579fe675c6e764e6cd8a9f62072419d643" exitCode=0 Oct 01 13:56:12 crc kubenswrapper[4605]: I1001 13:56:12.360746 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" event={"ID":"8542ad4d-301a-4957-ab3f-1c305ad1ff43","Type":"ContainerDied","Data":"d323a7b1807e6ee0ae6c6f648ea550579fe675c6e764e6cd8a9f62072419d643"} Oct 01 13:56:12 crc kubenswrapper[4605]: I1001 13:56:12.360809 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" event={"ID":"8542ad4d-301a-4957-ab3f-1c305ad1ff43","Type":"ContainerStarted","Data":"7c862ae398e78c63991e5709b6e0d051e15b88ba3ee1feb78586c7cce3bfac33"} Oct 01 13:56:14 crc kubenswrapper[4605]: I1001 13:56:14.371705 4605 generic.go:334] "Generic (PLEG): container finished" podID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerID="70d48d942bb393d864213aafe7fb4cc59796b2ff55f41feb2659ad5c35100a55" exitCode=0 Oct 01 13:56:14 crc kubenswrapper[4605]: I1001 13:56:14.371809 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" event={"ID":"8542ad4d-301a-4957-ab3f-1c305ad1ff43","Type":"ContainerDied","Data":"70d48d942bb393d864213aafe7fb4cc59796b2ff55f41feb2659ad5c35100a55"} Oct 01 13:56:15 crc kubenswrapper[4605]: I1001 13:56:15.379266 4605 generic.go:334] "Generic (PLEG): container finished" podID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerID="cde8cc4f7e1aaf3f8ecaef010bac493351af90e9bc28e82e8ae9ecd05e654cec" exitCode=0 Oct 01 13:56:15 crc kubenswrapper[4605]: I1001 13:56:15.379384 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" event={"ID":"8542ad4d-301a-4957-ab3f-1c305ad1ff43","Type":"ContainerDied","Data":"cde8cc4f7e1aaf3f8ecaef010bac493351af90e9bc28e82e8ae9ecd05e654cec"} Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.635826 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.668505 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-bundle\") pod \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.669340 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzv6b\" (UniqueName: \"kubernetes.io/projected/8542ad4d-301a-4957-ab3f-1c305ad1ff43-kube-api-access-dzv6b\") pod \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.669401 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-util\") pod \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\" (UID: \"8542ad4d-301a-4957-ab3f-1c305ad1ff43\") " Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.669348 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-bundle" (OuterVolumeSpecName: "bundle") pod "8542ad4d-301a-4957-ab3f-1c305ad1ff43" (UID: "8542ad4d-301a-4957-ab3f-1c305ad1ff43"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.674096 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8542ad4d-301a-4957-ab3f-1c305ad1ff43-kube-api-access-dzv6b" (OuterVolumeSpecName: "kube-api-access-dzv6b") pod "8542ad4d-301a-4957-ab3f-1c305ad1ff43" (UID: "8542ad4d-301a-4957-ab3f-1c305ad1ff43"). InnerVolumeSpecName "kube-api-access-dzv6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.683182 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-util" (OuterVolumeSpecName: "util") pod "8542ad4d-301a-4957-ab3f-1c305ad1ff43" (UID: "8542ad4d-301a-4957-ab3f-1c305ad1ff43"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.771331 4605 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.771362 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzv6b\" (UniqueName: \"kubernetes.io/projected/8542ad4d-301a-4957-ab3f-1c305ad1ff43-kube-api-access-dzv6b\") on node \"crc\" DevicePath \"\"" Oct 01 13:56:16 crc kubenswrapper[4605]: I1001 13:56:16.771377 4605 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8542ad4d-301a-4957-ab3f-1c305ad1ff43-util\") on node \"crc\" DevicePath \"\"" Oct 01 13:56:17 crc kubenswrapper[4605]: I1001 13:56:17.394683 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" event={"ID":"8542ad4d-301a-4957-ab3f-1c305ad1ff43","Type":"ContainerDied","Data":"7c862ae398e78c63991e5709b6e0d051e15b88ba3ee1feb78586c7cce3bfac33"} Oct 01 13:56:17 crc kubenswrapper[4605]: I1001 13:56:17.394721 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c862ae398e78c63991e5709b6e0d051e15b88ba3ee1feb78586c7cce3bfac33" Oct 01 13:56:17 crc kubenswrapper[4605]: I1001 13:56:17.394748 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.815023 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p"] Oct 01 13:56:18 crc kubenswrapper[4605]: E1001 13:56:18.815315 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="pull" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.815332 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="pull" Oct 01 13:56:18 crc kubenswrapper[4605]: E1001 13:56:18.815345 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="extract" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.815352 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="extract" Oct 01 13:56:18 crc kubenswrapper[4605]: E1001 13:56:18.815363 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="util" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.815370 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="util" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.815480 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="8542ad4d-301a-4957-ab3f-1c305ad1ff43" containerName="extract" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.815983 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.817504 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-grhkz" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.817671 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.817928 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.846319 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p"] Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.896114 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x5qx\" (UniqueName: \"kubernetes.io/projected/fc364b6e-b66d-4634-890a-f2eaed00901e-kube-api-access-9x5qx\") pod \"nmstate-operator-5d6f6cfd66-mmf2p\" (UID: \"fc364b6e-b66d-4634-890a-f2eaed00901e\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" Oct 01 13:56:18 crc kubenswrapper[4605]: I1001 13:56:18.997533 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x5qx\" (UniqueName: \"kubernetes.io/projected/fc364b6e-b66d-4634-890a-f2eaed00901e-kube-api-access-9x5qx\") pod \"nmstate-operator-5d6f6cfd66-mmf2p\" (UID: \"fc364b6e-b66d-4634-890a-f2eaed00901e\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" Oct 01 13:56:19 crc kubenswrapper[4605]: I1001 13:56:19.012783 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x5qx\" (UniqueName: \"kubernetes.io/projected/fc364b6e-b66d-4634-890a-f2eaed00901e-kube-api-access-9x5qx\") pod \"nmstate-operator-5d6f6cfd66-mmf2p\" (UID: \"fc364b6e-b66d-4634-890a-f2eaed00901e\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" Oct 01 13:56:19 crc kubenswrapper[4605]: I1001 13:56:19.143617 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" Oct 01 13:56:19 crc kubenswrapper[4605]: I1001 13:56:19.336292 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p"] Oct 01 13:56:19 crc kubenswrapper[4605]: I1001 13:56:19.404815 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" event={"ID":"fc364b6e-b66d-4634-890a-f2eaed00901e","Type":"ContainerStarted","Data":"757c64585e7107bd3fc2705d0b80bc9e42f146214566d213a436841d3c9b9999"} Oct 01 13:56:22 crc kubenswrapper[4605]: I1001 13:56:22.424835 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" event={"ID":"fc364b6e-b66d-4634-890a-f2eaed00901e","Type":"ContainerStarted","Data":"3cc753f2aaefc2f2518829518db2d62e1d53ded2a59b9e6a89a9806d3da266d4"} Oct 01 13:56:22 crc kubenswrapper[4605]: I1001 13:56:22.450558 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-mmf2p" podStartSLOduration=2.204952031 podStartE2EDuration="4.45053842s" podCreationTimestamp="2025-10-01 13:56:18 +0000 UTC" firstStartedPulling="2025-10-01 13:56:19.361091497 +0000 UTC m=+702.105067705" lastFinishedPulling="2025-10-01 13:56:21.606677886 +0000 UTC m=+704.350654094" observedRunningTime="2025-10-01 13:56:22.446564139 +0000 UTC m=+705.190540347" watchObservedRunningTime="2025-10-01 13:56:22.45053842 +0000 UTC m=+705.194514638" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.356389 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-k28lm"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.357349 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.359253 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-58gzd" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.371244 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-k28lm"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.387347 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.388175 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.390113 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.407438 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.423669 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-txcp9"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.424435 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.449357 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-nmstate-lock\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.449720 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27grv\" (UniqueName: \"kubernetes.io/projected/d9977c55-dbd0-45e6-8483-5b0e5a279566-kube-api-access-27grv\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.449749 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n82pq\" (UniqueName: \"kubernetes.io/projected/f75cf276-8bcb-4ab4-bed4-d12b1252691f-kube-api-access-n82pq\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.454328 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f75cf276-8bcb-4ab4-bed4-d12b1252691f-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.454450 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9fqh\" (UniqueName: \"kubernetes.io/projected/8f826dfa-48c9-42ed-8f62-e3ae00653a07-kube-api-access-j9fqh\") pod \"nmstate-metrics-58fcddf996-k28lm\" (UID: \"8f826dfa-48c9-42ed-8f62-e3ae00653a07\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.454532 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-dbus-socket\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.454672 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-ovs-socket\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556133 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9fqh\" (UniqueName: \"kubernetes.io/projected/8f826dfa-48c9-42ed-8f62-e3ae00653a07-kube-api-access-j9fqh\") pod \"nmstate-metrics-58fcddf996-k28lm\" (UID: \"8f826dfa-48c9-42ed-8f62-e3ae00653a07\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556200 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" 
(UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-dbus-socket\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556254 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-ovs-socket\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556292 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-nmstate-lock\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556323 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27grv\" (UniqueName: \"kubernetes.io/projected/d9977c55-dbd0-45e6-8483-5b0e5a279566-kube-api-access-27grv\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556350 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n82pq\" (UniqueName: \"kubernetes.io/projected/f75cf276-8bcb-4ab4-bed4-d12b1252691f-kube-api-access-n82pq\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556371 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f75cf276-8bcb-4ab4-bed4-d12b1252691f-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:23 crc kubenswrapper[4605]: E1001 13:56:23.556505 4605 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 01 13:56:23 crc kubenswrapper[4605]: E1001 13:56:23.556563 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f75cf276-8bcb-4ab4-bed4-d12b1252691f-tls-key-pair podName:f75cf276-8bcb-4ab4-bed4-d12b1252691f nodeName:}" failed. No retries permitted until 2025-10-01 13:56:24.056542863 +0000 UTC m=+706.800519071 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/f75cf276-8bcb-4ab4-bed4-d12b1252691f-tls-key-pair") pod "nmstate-webhook-6d689559c5-dgsw9" (UID: "f75cf276-8bcb-4ab4-bed4-d12b1252691f") : secret "openshift-nmstate-webhook" not found Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556570 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-nmstate-lock\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556738 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-ovs-socket\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.556833 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d9977c55-dbd0-45e6-8483-5b0e5a279566-dbus-socket\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.581502 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27grv\" (UniqueName: \"kubernetes.io/projected/d9977c55-dbd0-45e6-8483-5b0e5a279566-kube-api-access-27grv\") pod \"nmstate-handler-txcp9\" (UID: \"d9977c55-dbd0-45e6-8483-5b0e5a279566\") " pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.593316 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9fqh\" (UniqueName: \"kubernetes.io/projected/8f826dfa-48c9-42ed-8f62-e3ae00653a07-kube-api-access-j9fqh\") pod \"nmstate-metrics-58fcddf996-k28lm\" (UID: \"8f826dfa-48c9-42ed-8f62-e3ae00653a07\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.599235 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n82pq\" (UniqueName: \"kubernetes.io/projected/f75cf276-8bcb-4ab4-bed4-d12b1252691f-kube-api-access-n82pq\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.642311 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.643004 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.648806 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.649000 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.649221 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fcgb2" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.662882 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.670670 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.759315 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdwpm\" (UniqueName: \"kubernetes.io/projected/cceecdf3-965b-4939-a871-628e73d1ce1e-kube-api-access-zdwpm\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.759666 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cceecdf3-965b-4939-a871-628e73d1ce1e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.759756 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cceecdf3-965b-4939-a871-628e73d1ce1e-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.769971 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.828950 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-689bbd87fc-tjjrg"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.829876 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.852312 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-689bbd87fc-tjjrg"] Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.860931 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-service-ca\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861168 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-oauth-config\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861192 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-oauth-serving-cert\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861219 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-trusted-ca-bundle\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861273 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z822\" (UniqueName: \"kubernetes.io/projected/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-kube-api-access-4z822\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861310 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdwpm\" (UniqueName: \"kubernetes.io/projected/cceecdf3-965b-4939-a871-628e73d1ce1e-kube-api-access-zdwpm\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861347 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-config\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861372 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-serving-cert\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " 
pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861393 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cceecdf3-965b-4939-a871-628e73d1ce1e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.861408 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cceecdf3-965b-4939-a871-628e73d1ce1e-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.862212 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/cceecdf3-965b-4939-a871-628e73d1ce1e-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: E1001 13:56:23.862449 4605 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 01 13:56:23 crc kubenswrapper[4605]: E1001 13:56:23.862481 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cceecdf3-965b-4939-a871-628e73d1ce1e-plugin-serving-cert podName:cceecdf3-965b-4939-a871-628e73d1ce1e nodeName:}" failed. No retries permitted until 2025-10-01 13:56:24.362471064 +0000 UTC m=+707.106447272 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/cceecdf3-965b-4939-a871-628e73d1ce1e-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-w9zqn" (UID: "cceecdf3-965b-4939-a871-628e73d1ce1e") : secret "plugin-serving-cert" not found Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.889821 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdwpm\" (UniqueName: \"kubernetes.io/projected/cceecdf3-965b-4939-a871-628e73d1ce1e-kube-api-access-zdwpm\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.962854 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-config\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.962905 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-serving-cert\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.962951 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-service-ca\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.962966 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-oauth-config\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.962987 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-oauth-serving-cert\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.963004 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-trusted-ca-bundle\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.963036 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z822\" (UniqueName: \"kubernetes.io/projected/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-kube-api-access-4z822\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.964074 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-config\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.965777 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-service-ca\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.966893 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-serving-cert\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.967359 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-trusted-ca-bundle\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.967699 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-console-oauth-config\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.968334 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-oauth-serving-cert\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:23 crc kubenswrapper[4605]: I1001 13:56:23.980067 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z822\" (UniqueName: \"kubernetes.io/projected/d1d0c7bc-2c71-4a74-b787-660c2866e7b1-kube-api-access-4z822\") pod \"console-689bbd87fc-tjjrg\" (UID: \"d1d0c7bc-2c71-4a74-b787-660c2866e7b1\") " pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.009528 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-k28lm"] Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.064727 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f75cf276-8bcb-4ab4-bed4-d12b1252691f-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.068434 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/f75cf276-8bcb-4ab4-bed4-d12b1252691f-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-dgsw9\" (UID: \"f75cf276-8bcb-4ab4-bed4-d12b1252691f\") " 
pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.144924 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.310132 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.347161 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-689bbd87fc-tjjrg"] Oct 01 13:56:24 crc kubenswrapper[4605]: W1001 13:56:24.357340 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1d0c7bc_2c71_4a74_b787_660c2866e7b1.slice/crio-68bd02afa1dcf5d357659e25970e7c362152bb1ffa7410e33565e6222be2ac7c WatchSource:0}: Error finding container 68bd02afa1dcf5d357659e25970e7c362152bb1ffa7410e33565e6222be2ac7c: Status 404 returned error can't find the container with id 68bd02afa1dcf5d357659e25970e7c362152bb1ffa7410e33565e6222be2ac7c Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.367865 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cceecdf3-965b-4939-a871-628e73d1ce1e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.374782 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/cceecdf3-965b-4939-a871-628e73d1ce1e-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-w9zqn\" (UID: \"cceecdf3-965b-4939-a871-628e73d1ce1e\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.436145 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-689bbd87fc-tjjrg" event={"ID":"d1d0c7bc-2c71-4a74-b787-660c2866e7b1","Type":"ContainerStarted","Data":"68bd02afa1dcf5d357659e25970e7c362152bb1ffa7410e33565e6222be2ac7c"} Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.437887 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" event={"ID":"8f826dfa-48c9-42ed-8f62-e3ae00653a07","Type":"ContainerStarted","Data":"24cbca4396c03882d6919bdf9c4c6a652be63f175f1e022c72d46f466fb61534"} Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.439149 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-txcp9" event={"ID":"d9977c55-dbd0-45e6-8483-5b0e5a279566","Type":"ContainerStarted","Data":"55115905928a315768ebbafdca78311b210305c9c2083ed0ee73f9a342f9bedf"} Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.512993 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9"] Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.556261 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" Oct 01 13:56:24 crc kubenswrapper[4605]: I1001 13:56:24.759126 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn"] Oct 01 13:56:24 crc kubenswrapper[4605]: W1001 13:56:24.767908 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcceecdf3_965b_4939_a871_628e73d1ce1e.slice/crio-fa560e22a3d1a068d2263c936a227db3312c9bc3eb1b0dc8db626e62d02d5268 WatchSource:0}: Error finding container fa560e22a3d1a068d2263c936a227db3312c9bc3eb1b0dc8db626e62d02d5268: Status 404 returned error can't find the container with id fa560e22a3d1a068d2263c936a227db3312c9bc3eb1b0dc8db626e62d02d5268 Oct 01 13:56:25 crc kubenswrapper[4605]: I1001 13:56:25.445819 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-689bbd87fc-tjjrg" event={"ID":"d1d0c7bc-2c71-4a74-b787-660c2866e7b1","Type":"ContainerStarted","Data":"81dcb5037f1000f2d1b6391a146f34894c9d7d82abc434df0751f76984d446c1"} Oct 01 13:56:25 crc kubenswrapper[4605]: I1001 13:56:25.447169 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" event={"ID":"f75cf276-8bcb-4ab4-bed4-d12b1252691f","Type":"ContainerStarted","Data":"ddb42b626f75a7c1bb0cc62dbfebb6b998d89de1a0470dba20a78c467b942730"} Oct 01 13:56:25 crc kubenswrapper[4605]: I1001 13:56:25.449885 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" event={"ID":"cceecdf3-965b-4939-a871-628e73d1ce1e","Type":"ContainerStarted","Data":"fa560e22a3d1a068d2263c936a227db3312c9bc3eb1b0dc8db626e62d02d5268"} Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.462081 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" event={"ID":"8f826dfa-48c9-42ed-8f62-e3ae00653a07","Type":"ContainerStarted","Data":"151c6e71d63ae09c80d126aab9134de9227a89d73f0c88abee83d437305cea6b"} Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.463325 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-txcp9" event={"ID":"d9977c55-dbd0-45e6-8483-5b0e5a279566","Type":"ContainerStarted","Data":"c1dcaad68a9a640a473c72f29e3924b827bd7802e334245a13854ba3525f8eb2"} Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.463467 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.465867 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" event={"ID":"f75cf276-8bcb-4ab4-bed4-d12b1252691f","Type":"ContainerStarted","Data":"fbbb3f82545b38e0153da5fa9829b0f70f79aa472f61a62f3dd28f97dc15b60a"} Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.466012 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.480253 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-txcp9" podStartSLOduration=1.6189999400000001 podStartE2EDuration="4.480235237s" podCreationTimestamp="2025-10-01 13:56:23 +0000 UTC" firstStartedPulling="2025-10-01 13:56:23.788189967 +0000 UTC m=+706.532166165" lastFinishedPulling="2025-10-01 
13:56:26.649425254 +0000 UTC m=+709.393401462" observedRunningTime="2025-10-01 13:56:27.477847756 +0000 UTC m=+710.221823964" watchObservedRunningTime="2025-10-01 13:56:27.480235237 +0000 UTC m=+710.224211445" Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.481900 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-689bbd87fc-tjjrg" podStartSLOduration=4.481890689 podStartE2EDuration="4.481890689s" podCreationTimestamp="2025-10-01 13:56:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:56:25.465314187 +0000 UTC m=+708.209290395" watchObservedRunningTime="2025-10-01 13:56:27.481890689 +0000 UTC m=+710.225866907" Oct 01 13:56:27 crc kubenswrapper[4605]: I1001 13:56:27.958236 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" podStartSLOduration=2.849733883 podStartE2EDuration="4.958215248s" podCreationTimestamp="2025-10-01 13:56:23 +0000 UTC" firstStartedPulling="2025-10-01 13:56:24.532271128 +0000 UTC m=+707.276247336" lastFinishedPulling="2025-10-01 13:56:26.640752493 +0000 UTC m=+709.384728701" observedRunningTime="2025-10-01 13:56:27.493816322 +0000 UTC m=+710.237792530" watchObservedRunningTime="2025-10-01 13:56:27.958215248 +0000 UTC m=+710.702191456" Oct 01 13:56:28 crc kubenswrapper[4605]: I1001 13:56:28.482671 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" event={"ID":"cceecdf3-965b-4939-a871-628e73d1ce1e","Type":"ContainerStarted","Data":"b5b49b4cf37a23aaaefbf7aa1c7868987713e3a9b482def4171c0338ddd0bdc2"} Oct 01 13:56:28 crc kubenswrapper[4605]: I1001 13:56:28.500713 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-w9zqn" podStartSLOduration=2.60913182 podStartE2EDuration="5.500698477s" podCreationTimestamp="2025-10-01 13:56:23 +0000 UTC" firstStartedPulling="2025-10-01 13:56:24.769345079 +0000 UTC m=+707.513321307" lastFinishedPulling="2025-10-01 13:56:27.660911756 +0000 UTC m=+710.404887964" observedRunningTime="2025-10-01 13:56:28.499557398 +0000 UTC m=+711.243533616" watchObservedRunningTime="2025-10-01 13:56:28.500698477 +0000 UTC m=+711.244674675" Oct 01 13:56:29 crc kubenswrapper[4605]: I1001 13:56:29.496563 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" event={"ID":"8f826dfa-48c9-42ed-8f62-e3ae00653a07","Type":"ContainerStarted","Data":"fe450b9a541f0c00264b282d233ad109050a274a62f6606a95c375c61c58e4b0"} Oct 01 13:56:29 crc kubenswrapper[4605]: I1001 13:56:29.515736 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-k28lm" podStartSLOduration=1.639617133 podStartE2EDuration="6.515709448s" podCreationTimestamp="2025-10-01 13:56:23 +0000 UTC" firstStartedPulling="2025-10-01 13:56:24.024707705 +0000 UTC m=+706.768683913" lastFinishedPulling="2025-10-01 13:56:28.90080002 +0000 UTC m=+711.644776228" observedRunningTime="2025-10-01 13:56:29.511973543 +0000 UTC m=+712.255949791" watchObservedRunningTime="2025-10-01 13:56:29.515709448 +0000 UTC m=+712.259685696" Oct 01 13:56:33 crc kubenswrapper[4605]: I1001 13:56:33.800463 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-txcp9" Oct 01 13:56:34 crc 
kubenswrapper[4605]: I1001 13:56:34.145133 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:34 crc kubenswrapper[4605]: I1001 13:56:34.145190 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:34 crc kubenswrapper[4605]: I1001 13:56:34.152623 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:34 crc kubenswrapper[4605]: I1001 13:56:34.528774 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-689bbd87fc-tjjrg" Oct 01 13:56:34 crc kubenswrapper[4605]: I1001 13:56:34.595083 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2brwf"] Oct 01 13:56:44 crc kubenswrapper[4605]: I1001 13:56:44.315493 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-dgsw9" Oct 01 13:56:51 crc kubenswrapper[4605]: I1001 13:56:51.630568 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:56:51 crc kubenswrapper[4605]: I1001 13:56:51.630984 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.577837 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7"] Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.579472 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.581122 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.591001 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.591077 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.591161 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw566\" (UniqueName: \"kubernetes.io/projected/81bb3cba-46ef-47f6-8f79-326bd240dc58-kube-api-access-gw566\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.591221 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7"] Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.691829 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.691898 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.691939 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw566\" (UniqueName: \"kubernetes.io/projected/81bb3cba-46ef-47f6-8f79-326bd240dc58-kube-api-access-gw566\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.692419 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.692595 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.712376 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw566\" (UniqueName: \"kubernetes.io/projected/81bb3cba-46ef-47f6-8f79-326bd240dc58-kube-api-access-gw566\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:55 crc kubenswrapper[4605]: I1001 13:56:55.911641 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:56:56 crc kubenswrapper[4605]: I1001 13:56:56.122732 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7"] Oct 01 13:56:56 crc kubenswrapper[4605]: I1001 13:56:56.638052 4605 generic.go:334] "Generic (PLEG): container finished" podID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerID="e59c7c80fb8ab51a502ae2adeab64ff75c22fa2aa1eb54230f6adc2f0f2ea56e" exitCode=0 Oct 01 13:56:56 crc kubenswrapper[4605]: I1001 13:56:56.638145 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" event={"ID":"81bb3cba-46ef-47f6-8f79-326bd240dc58","Type":"ContainerDied","Data":"e59c7c80fb8ab51a502ae2adeab64ff75c22fa2aa1eb54230f6adc2f0f2ea56e"} Oct 01 13:56:56 crc kubenswrapper[4605]: I1001 13:56:56.638374 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" event={"ID":"81bb3cba-46ef-47f6-8f79-326bd240dc58","Type":"ContainerStarted","Data":"4f4b112266eaf5af3d4b7e7aabef6013dfd2c0101268462b464d929a5c0b527f"} Oct 01 13:56:58 crc kubenswrapper[4605]: I1001 13:56:58.659246 4605 generic.go:334] "Generic (PLEG): container finished" podID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerID="d17273d18b1f6edf521742acd6859b82777eadb2e00e696382705f6f4abcf4ff" exitCode=0 Oct 01 13:56:58 crc kubenswrapper[4605]: I1001 13:56:58.659302 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" event={"ID":"81bb3cba-46ef-47f6-8f79-326bd240dc58","Type":"ContainerDied","Data":"d17273d18b1f6edf521742acd6859b82777eadb2e00e696382705f6f4abcf4ff"} Oct 01 13:56:59 crc kubenswrapper[4605]: I1001 13:56:59.644356 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-2brwf" podUID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" containerName="console" 
containerID="cri-o://884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac" gracePeriod=15 Oct 01 13:56:59 crc kubenswrapper[4605]: I1001 13:56:59.670362 4605 generic.go:334] "Generic (PLEG): container finished" podID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerID="eb67dd4db75c5b0ad17a73fc2562694c07f1fc9d7b335a1513b3b2fdb1006107" exitCode=0 Oct 01 13:56:59 crc kubenswrapper[4605]: I1001 13:56:59.670412 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" event={"ID":"81bb3cba-46ef-47f6-8f79-326bd240dc58","Type":"ContainerDied","Data":"eb67dd4db75c5b0ad17a73fc2562694c07f1fc9d7b335a1513b3b2fdb1006107"} Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.059898 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2brwf_33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb/console/0.log" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.060210 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149730 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-service-ca\") pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149787 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-oauth-config\") pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149819 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cktqn\" (UniqueName: \"kubernetes.io/projected/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-kube-api-access-cktqn\") pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149841 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-trusted-ca-bundle\") pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149886 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-config\") pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149913 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-serving-cert\") pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.149946 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-oauth-serving-cert\") 
pod \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\" (UID: \"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb\") " Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.151567 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-service-ca" (OuterVolumeSpecName: "service-ca") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.151652 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.151851 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-config" (OuterVolumeSpecName: "console-config") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.151862 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.156156 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.157284 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.158057 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-kube-api-access-cktqn" (OuterVolumeSpecName: "kube-api-access-cktqn") pod "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" (UID: "33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb"). InnerVolumeSpecName "kube-api-access-cktqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250845 4605 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250875 4605 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250885 4605 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250893 4605 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250901 4605 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250909 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cktqn\" (UniqueName: \"kubernetes.io/projected/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-kube-api-access-cktqn\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.250918 4605 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.680913 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2brwf_33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb/console/0.log" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.680999 4605 generic.go:334] "Generic (PLEG): container finished" podID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" containerID="884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac" exitCode=2 Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.681075 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2brwf" event={"ID":"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb","Type":"ContainerDied","Data":"884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac"} Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.681160 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2brwf" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.681195 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2brwf" event={"ID":"33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb","Type":"ContainerDied","Data":"8b1c49b4f8ef06803e7667c271185bf17107f81dbc088cfc8a3aac43d4dc6b5e"} Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.681232 4605 scope.go:117] "RemoveContainer" containerID="884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.733809 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2brwf"] Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.740854 4605 scope.go:117] "RemoveContainer" containerID="884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac" Oct 01 13:57:00 crc kubenswrapper[4605]: E1001 13:57:00.741449 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac\": container with ID starting with 884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac not found: ID does not exist" containerID="884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.741534 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac"} err="failed to get container status \"884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac\": rpc error: code = NotFound desc = could not find container \"884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac\": container with ID starting with 884c3fbb1455cc3222ababfb497105e23d4e5ef404a1c43d849793d293c8c3ac not found: ID does not exist" Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.742319 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-2brwf"] Oct 01 13:57:00 crc kubenswrapper[4605]: I1001 13:57:00.951287 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.060483 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-util\") pod \"81bb3cba-46ef-47f6-8f79-326bd240dc58\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.060613 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw566\" (UniqueName: \"kubernetes.io/projected/81bb3cba-46ef-47f6-8f79-326bd240dc58-kube-api-access-gw566\") pod \"81bb3cba-46ef-47f6-8f79-326bd240dc58\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.060658 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-bundle\") pod \"81bb3cba-46ef-47f6-8f79-326bd240dc58\" (UID: \"81bb3cba-46ef-47f6-8f79-326bd240dc58\") " Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.061789 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-bundle" (OuterVolumeSpecName: "bundle") pod "81bb3cba-46ef-47f6-8f79-326bd240dc58" (UID: "81bb3cba-46ef-47f6-8f79-326bd240dc58"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.066306 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81bb3cba-46ef-47f6-8f79-326bd240dc58-kube-api-access-gw566" (OuterVolumeSpecName: "kube-api-access-gw566") pod "81bb3cba-46ef-47f6-8f79-326bd240dc58" (UID: "81bb3cba-46ef-47f6-8f79-326bd240dc58"). InnerVolumeSpecName "kube-api-access-gw566". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.077250 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-util" (OuterVolumeSpecName: "util") pod "81bb3cba-46ef-47f6-8f79-326bd240dc58" (UID: "81bb3cba-46ef-47f6-8f79-326bd240dc58"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.161781 4605 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-util\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.161817 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw566\" (UniqueName: \"kubernetes.io/projected/81bb3cba-46ef-47f6-8f79-326bd240dc58-kube-api-access-gw566\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.161827 4605 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81bb3cba-46ef-47f6-8f79-326bd240dc58-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.689398 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.689529 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7" event={"ID":"81bb3cba-46ef-47f6-8f79-326bd240dc58","Type":"ContainerDied","Data":"4f4b112266eaf5af3d4b7e7aabef6013dfd2c0101268462b464d929a5c0b527f"} Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.689580 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f4b112266eaf5af3d4b7e7aabef6013dfd2c0101268462b464d929a5c0b527f" Oct 01 13:57:01 crc kubenswrapper[4605]: I1001 13:57:01.935758 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" path="/var/lib/kubelet/pods/33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb/volumes" Oct 01 13:57:08 crc kubenswrapper[4605]: I1001 13:57:08.738733 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lf6wn"] Oct 01 13:57:08 crc kubenswrapper[4605]: I1001 13:57:08.739531 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" podUID="26fe0021-1c2a-4f4e-a6cb-86237a120608" containerName="controller-manager" containerID="cri-o://8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f" gracePeriod=30 Oct 01 13:57:08 crc kubenswrapper[4605]: I1001 13:57:08.830936 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"] Oct 01 13:57:08 crc kubenswrapper[4605]: I1001 13:57:08.831144 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" podUID="e87d23b8-e74b-4fa9-8f83-760ab58e224d" containerName="route-controller-manager" containerID="cri-o://23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c" gracePeriod=30 Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.308945 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.388539 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.470600 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fe0021-1c2a-4f4e-a6cb-86237a120608-serving-cert\") pod \"26fe0021-1c2a-4f4e-a6cb-86237a120608\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.470647 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-client-ca\") pod \"26fe0021-1c2a-4f4e-a6cb-86237a120608\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.470698 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-config\") pod \"26fe0021-1c2a-4f4e-a6cb-86237a120608\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.470739 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzrh8\" (UniqueName: \"kubernetes.io/projected/26fe0021-1c2a-4f4e-a6cb-86237a120608-kube-api-access-mzrh8\") pod \"26fe0021-1c2a-4f4e-a6cb-86237a120608\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.470811 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-proxy-ca-bundles\") pod \"26fe0021-1c2a-4f4e-a6cb-86237a120608\" (UID: \"26fe0021-1c2a-4f4e-a6cb-86237a120608\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.471851 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "26fe0021-1c2a-4f4e-a6cb-86237a120608" (UID: "26fe0021-1c2a-4f4e-a6cb-86237a120608"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.471871 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-client-ca" (OuterVolumeSpecName: "client-ca") pod "26fe0021-1c2a-4f4e-a6cb-86237a120608" (UID: "26fe0021-1c2a-4f4e-a6cb-86237a120608"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.472415 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-config" (OuterVolumeSpecName: "config") pod "26fe0021-1c2a-4f4e-a6cb-86237a120608" (UID: "26fe0021-1c2a-4f4e-a6cb-86237a120608"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.476524 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26fe0021-1c2a-4f4e-a6cb-86237a120608-kube-api-access-mzrh8" (OuterVolumeSpecName: "kube-api-access-mzrh8") pod "26fe0021-1c2a-4f4e-a6cb-86237a120608" (UID: "26fe0021-1c2a-4f4e-a6cb-86237a120608"). 
InnerVolumeSpecName "kube-api-access-mzrh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.477423 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fe0021-1c2a-4f4e-a6cb-86237a120608-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "26fe0021-1c2a-4f4e-a6cb-86237a120608" (UID: "26fe0021-1c2a-4f4e-a6cb-86237a120608"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.571667 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87d23b8-e74b-4fa9-8f83-760ab58e224d-serving-cert\") pod \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.571773 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-config\") pod \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.571834 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-client-ca\") pod \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.571882 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frcm4\" (UniqueName: \"kubernetes.io/projected/e87d23b8-e74b-4fa9-8f83-760ab58e224d-kube-api-access-frcm4\") pod \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\" (UID: \"e87d23b8-e74b-4fa9-8f83-760ab58e224d\") " Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572190 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzrh8\" (UniqueName: \"kubernetes.io/projected/26fe0021-1c2a-4f4e-a6cb-86237a120608-kube-api-access-mzrh8\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572208 4605 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572220 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26fe0021-1c2a-4f4e-a6cb-86237a120608-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572231 4605 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572242 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26fe0021-1c2a-4f4e-a6cb-86237a120608-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572528 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-config" (OuterVolumeSpecName: "config") pod "e87d23b8-e74b-4fa9-8f83-760ab58e224d" (UID: 
"e87d23b8-e74b-4fa9-8f83-760ab58e224d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.572563 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-client-ca" (OuterVolumeSpecName: "client-ca") pod "e87d23b8-e74b-4fa9-8f83-760ab58e224d" (UID: "e87d23b8-e74b-4fa9-8f83-760ab58e224d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.574523 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87d23b8-e74b-4fa9-8f83-760ab58e224d-kube-api-access-frcm4" (OuterVolumeSpecName: "kube-api-access-frcm4") pod "e87d23b8-e74b-4fa9-8f83-760ab58e224d" (UID: "e87d23b8-e74b-4fa9-8f83-760ab58e224d"). InnerVolumeSpecName "kube-api-access-frcm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.577379 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87d23b8-e74b-4fa9-8f83-760ab58e224d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e87d23b8-e74b-4fa9-8f83-760ab58e224d" (UID: "e87d23b8-e74b-4fa9-8f83-760ab58e224d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.673476 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frcm4\" (UniqueName: \"kubernetes.io/projected/e87d23b8-e74b-4fa9-8f83-760ab58e224d-kube-api-access-frcm4\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.673508 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87d23b8-e74b-4fa9-8f83-760ab58e224d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.673519 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.673526 4605 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e87d23b8-e74b-4fa9-8f83-760ab58e224d-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.736453 4605 generic.go:334] "Generic (PLEG): container finished" podID="e87d23b8-e74b-4fa9-8f83-760ab58e224d" containerID="23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c" exitCode=0 Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.736491 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.736546 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" event={"ID":"e87d23b8-e74b-4fa9-8f83-760ab58e224d","Type":"ContainerDied","Data":"23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c"} Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.736588 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq" event={"ID":"e87d23b8-e74b-4fa9-8f83-760ab58e224d","Type":"ContainerDied","Data":"e90c05da60d5d2b390f4c4975701fe400eff20aa198fa42e127923e2f3dddd96"} Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.736608 4605 scope.go:117] "RemoveContainer" containerID="23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.738823 4605 generic.go:334] "Generic (PLEG): container finished" podID="26fe0021-1c2a-4f4e-a6cb-86237a120608" containerID="8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f" exitCode=0 Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.738873 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.738876 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" event={"ID":"26fe0021-1c2a-4f4e-a6cb-86237a120608","Type":"ContainerDied","Data":"8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f"} Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.738991 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lf6wn" event={"ID":"26fe0021-1c2a-4f4e-a6cb-86237a120608","Type":"ContainerDied","Data":"57a366074b349e65fd1386d22bb953f39f90289e6cb1cd267622811006966d42"} Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.754463 4605 scope.go:117] "RemoveContainer" containerID="23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.755540 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c\": container with ID starting with 23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c not found: ID does not exist" containerID="23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.755579 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c"} err="failed to get container status \"23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c\": rpc error: code = NotFound desc = could not find container \"23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c\": container with ID starting with 23429c6f48681f44bc1948d1238a2516c1167904db2a51b2e480cc4907f2150c not found: ID does not exist" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.755606 4605 scope.go:117] "RemoveContainer" containerID="8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f" Oct 01 
13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.767985 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.773940 4605 scope.go:117] "RemoveContainer" containerID="8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.774626 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f\": container with ID starting with 8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f not found: ID does not exist" containerID="8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.774661 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f"} err="failed to get container status \"8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f\": rpc error: code = NotFound desc = could not find container \"8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f\": container with ID starting with 8fafc628b5d3b01cc49d30f63aabf25c8c903c0c366016c7123c18ac28f8f81f not found: ID does not exist" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.778190 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hfdjq"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.787297 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lf6wn"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.804940 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lf6wn"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.933450 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26fe0021-1c2a-4f4e-a6cb-86237a120608" path="/var/lib/kubelet/pods/26fe0021-1c2a-4f4e-a6cb-86237a120608/volumes" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.934194 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e87d23b8-e74b-4fa9-8f83-760ab58e224d" path="/var/lib/kubelet/pods/e87d23b8-e74b-4fa9-8f83-760ab58e224d/volumes" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.934827 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4"] Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.943903 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="util" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.956058 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="util" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.956280 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87d23b8-e74b-4fa9-8f83-760ab58e224d" containerName="route-controller-manager" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.956335 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87d23b8-e74b-4fa9-8f83-760ab58e224d" containerName="route-controller-manager" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 
13:57:09.956413 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26fe0021-1c2a-4f4e-a6cb-86237a120608" containerName="controller-manager" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.956485 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="26fe0021-1c2a-4f4e-a6cb-86237a120608" containerName="controller-manager" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.956542 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" containerName="console" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.956591 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" containerName="console" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.956654 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="pull" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.956704 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="pull" Oct 01 13:57:09 crc kubenswrapper[4605]: E1001 13:57:09.956766 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="extract" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.956823 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="extract" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.957256 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="81bb3cba-46ef-47f6-8f79-326bd240dc58" containerName="extract" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.957333 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="26fe0021-1c2a-4f4e-a6cb-86237a120608" containerName="controller-manager" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.957386 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87d23b8-e74b-4fa9-8f83-760ab58e224d" containerName="route-controller-manager" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.957444 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="33008fbe-9d5a-4f7e-a02a-9a302ce0a0eb" containerName="console" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.957992 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-787556f867-zwqxd"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.958721 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.967951 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-787556f867-zwqxd"] Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.959305 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.959264 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.981810 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.981875 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.981936 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982016 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982555 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982637 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982732 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982820 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982893 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.982963 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.983053 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.983125 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 13:57:09 crc kubenswrapper[4605]: I1001 13:57:09.987890 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.077741 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-client-ca\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.077837 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254777de-fea2-4d9e-abab-a407bec34126-serving-cert\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.077889 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-wk8td\" (UniqueName: \"kubernetes.io/projected/8415de30-2479-4f84-9943-a82906a16be5-kube-api-access-wk8td\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.077928 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-config\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.077954 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-client-ca\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.077975 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-proxy-ca-bundles\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.078004 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2jkp\" (UniqueName: \"kubernetes.io/projected/254777de-fea2-4d9e-abab-a407bec34126-kube-api-access-x2jkp\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.078025 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-config\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.078045 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8415de30-2479-4f84-9943-a82906a16be5-serving-cert\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.179600 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk8td\" (UniqueName: \"kubernetes.io/projected/8415de30-2479-4f84-9943-a82906a16be5-kube-api-access-wk8td\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.179928 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-config\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.179949 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-client-ca\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.179966 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-proxy-ca-bundles\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.179990 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2jkp\" (UniqueName: \"kubernetes.io/projected/254777de-fea2-4d9e-abab-a407bec34126-kube-api-access-x2jkp\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.180008 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-config\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.180027 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8415de30-2479-4f84-9943-a82906a16be5-serving-cert\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.180050 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-client-ca\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.180078 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254777de-fea2-4d9e-abab-a407bec34126-serving-cert\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.180496 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.181047 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-proxy-ca-bundles\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.181168 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.181653 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-client-ca\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.181800 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-client-ca\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.182208 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-config\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.182437 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-config\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.189450 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8415de30-2479-4f84-9943-a82906a16be5-serving-cert\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.192656 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254777de-fea2-4d9e-abab-a407bec34126-serving-cert\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.202325 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.202547 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.206463 4605 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"metallb-system"/"openshift-service-ca.crt" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.207372 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.210284 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-l88rp" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.234843 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk8td\" (UniqueName: \"kubernetes.io/projected/8415de30-2479-4f84-9943-a82906a16be5-kube-api-access-wk8td\") pod \"route-controller-manager-675f67df86-qjbw4\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.235690 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.258132 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2jkp\" (UniqueName: \"kubernetes.io/projected/254777de-fea2-4d9e-abab-a407bec34126-kube-api-access-x2jkp\") pod \"controller-manager-787556f867-zwqxd\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.281327 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84bv2\" (UniqueName: \"kubernetes.io/projected/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-kube-api-access-84bv2\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.281549 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-webhook-cert\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.281734 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-apiservice-cert\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.306212 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.317439 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.382647 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84bv2\" (UniqueName: \"kubernetes.io/projected/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-kube-api-access-84bv2\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.382693 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-webhook-cert\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.382729 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-apiservice-cert\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.394495 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-apiservice-cert\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.396643 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-webhook-cert\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.409801 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84bv2\" (UniqueName: \"kubernetes.io/projected/3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c-kube-api-access-84bv2\") pod \"metallb-operator-controller-manager-b7f675c95-dx7nr\" (UID: \"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c\") " pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.519334 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.520196 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.520929 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.531113 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-44xsb" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.531314 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.545220 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.549566 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.585516 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-787556f867-zwqxd"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.630928 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.686995 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-apiservice-cert\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.687065 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmjg4\" (UniqueName: \"kubernetes.io/projected/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-kube-api-access-pmjg4\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.687145 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-webhook-cert\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.788425 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-apiservice-cert\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.788498 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmjg4\" (UniqueName: \"kubernetes.io/projected/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-kube-api-access-pmjg4\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.788525 4605 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-webhook-cert\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.799193 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-webhook-cert\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.799680 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-apiservice-cert\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.824186 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmjg4\" (UniqueName: \"kubernetes.io/projected/d59274d2-8c52-4997-8e43-aab1e6f5ddd0-kube-api-access-pmjg4\") pod \"metallb-operator-webhook-server-6b6874df99-s8qkk\" (UID: \"d59274d2-8c52-4997-8e43-aab1e6f5ddd0\") " pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.867542 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.875492 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-787556f867-zwqxd"] Oct 01 13:57:10 crc kubenswrapper[4605]: I1001 13:57:10.942676 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4"] Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.150404 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr"] Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.428686 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk"] Oct 01 13:57:11 crc kubenswrapper[4605]: W1001 13:57:11.434698 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd59274d2_8c52_4997_8e43_aab1e6f5ddd0.slice/crio-ec0af55c3cc13390526b7ce376eb393f1cb0e672ca5c56f50c3886a7ff36c6e2 WatchSource:0}: Error finding container ec0af55c3cc13390526b7ce376eb393f1cb0e672ca5c56f50c3886a7ff36c6e2: Status 404 returned error can't find the container with id ec0af55c3cc13390526b7ce376eb393f1cb0e672ca5c56f50c3886a7ff36c6e2 Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.770151 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" event={"ID":"8415de30-2479-4f84-9943-a82906a16be5","Type":"ContainerStarted","Data":"56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3"} Oct 01 13:57:11 crc 
kubenswrapper[4605]: I1001 13:57:11.770188 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" event={"ID":"8415de30-2479-4f84-9943-a82906a16be5","Type":"ContainerStarted","Data":"b8236568528edef9a844baf841801e350c5c339e0b0783db5e60dfd7fbd73d66"} Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.770284 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" podUID="8415de30-2479-4f84-9943-a82906a16be5" containerName="route-controller-manager" containerID="cri-o://56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3" gracePeriod=30 Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.770936 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.772296 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" event={"ID":"254777de-fea2-4d9e-abab-a407bec34126","Type":"ContainerStarted","Data":"675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790"} Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.772318 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" event={"ID":"254777de-fea2-4d9e-abab-a407bec34126","Type":"ContainerStarted","Data":"48a296046f4eacc81cf4d88ab1300be8a759c4f49c6d1e20af2afa396af91108"} Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.772373 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" podUID="254777de-fea2-4d9e-abab-a407bec34126" containerName="controller-manager" containerID="cri-o://675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790" gracePeriod=30 Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.772525 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.777939 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" event={"ID":"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c","Type":"ContainerStarted","Data":"31f08761a6b1274f630ef6f325df184ab760a69bfc8f0ad848ce8b6a93505830"} Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.782031 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" event={"ID":"d59274d2-8c52-4997-8e43-aab1e6f5ddd0","Type":"ContainerStarted","Data":"ec0af55c3cc13390526b7ce376eb393f1cb0e672ca5c56f50c3886a7ff36c6e2"} Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.786631 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.886213 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" podStartSLOduration=3.88619528 podStartE2EDuration="3.88619528s" podCreationTimestamp="2025-10-01 13:57:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:57:11.833556035 +0000 UTC m=+754.577532253" watchObservedRunningTime="2025-10-01 13:57:11.88619528 +0000 UTC m=+754.630171488" Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.887142 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" podStartSLOduration=3.887136454 podStartE2EDuration="3.887136454s" podCreationTimestamp="2025-10-01 13:57:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:57:11.883980143 +0000 UTC m=+754.627956352" watchObservedRunningTime="2025-10-01 13:57:11.887136454 +0000 UTC m=+754.631112672" Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.935844 4605 patch_prober.go:28] interesting pod/route-controller-manager-675f67df86-qjbw4 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.47:8443/healthz\": read tcp 10.217.0.2:59604->10.217.0.47:8443: read: connection reset by peer" start-of-body= Oct 01 13:57:11 crc kubenswrapper[4605]: I1001 13:57:11.935903 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" podUID="8415de30-2479-4f84-9943-a82906a16be5" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.47:8443/healthz\": read tcp 10.217.0.2:59604->10.217.0.47:8443: read: connection reset by peer" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.200740 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.227014 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4"] Oct 01 13:57:12 crc kubenswrapper[4605]: E1001 13:57:12.227272 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254777de-fea2-4d9e-abab-a407bec34126" containerName="controller-manager" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.227284 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="254777de-fea2-4d9e-abab-a407bec34126" containerName="controller-manager" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.227383 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="254777de-fea2-4d9e-abab-a407bec34126" containerName="controller-manager" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.227710 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.239216 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4"] Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.314194 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-client-ca\") pod \"254777de-fea2-4d9e-abab-a407bec34126\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.314439 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-config\") pod \"254777de-fea2-4d9e-abab-a407bec34126\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.314601 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254777de-fea2-4d9e-abab-a407bec34126-serving-cert\") pod \"254777de-fea2-4d9e-abab-a407bec34126\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.314761 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-proxy-ca-bundles\") pod \"254777de-fea2-4d9e-abab-a407bec34126\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315298 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2jkp\" (UniqueName: \"kubernetes.io/projected/254777de-fea2-4d9e-abab-a407bec34126-kube-api-access-x2jkp\") pod \"254777de-fea2-4d9e-abab-a407bec34126\" (UID: \"254777de-fea2-4d9e-abab-a407bec34126\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315212 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-client-ca" (OuterVolumeSpecName: "client-ca") pod "254777de-fea2-4d9e-abab-a407bec34126" (UID: "254777de-fea2-4d9e-abab-a407bec34126"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315260 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "254777de-fea2-4d9e-abab-a407bec34126" (UID: "254777de-fea2-4d9e-abab-a407bec34126"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315411 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-config" (OuterVolumeSpecName: "config") pod "254777de-fea2-4d9e-abab-a407bec34126" (UID: "254777de-fea2-4d9e-abab-a407bec34126"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315617 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-serving-cert\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315714 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-proxy-ca-bundles\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315802 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6vh7\" (UniqueName: \"kubernetes.io/projected/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-kube-api-access-g6vh7\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.315897 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-config\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.316038 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-client-ca\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.316172 4605 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.316233 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.316288 4605 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/254777de-fea2-4d9e-abab-a407bec34126-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.323251 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/254777de-fea2-4d9e-abab-a407bec34126-kube-api-access-x2jkp" (OuterVolumeSpecName: "kube-api-access-x2jkp") pod "254777de-fea2-4d9e-abab-a407bec34126" (UID: "254777de-fea2-4d9e-abab-a407bec34126"). InnerVolumeSpecName "kube-api-access-x2jkp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.323863 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254777de-fea2-4d9e-abab-a407bec34126-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "254777de-fea2-4d9e-abab-a407bec34126" (UID: "254777de-fea2-4d9e-abab-a407bec34126"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.335249 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-675f67df86-qjbw4_8415de30-2479-4f84-9943-a82906a16be5/route-controller-manager/0.log" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.335867 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.417698 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8415de30-2479-4f84-9943-a82906a16be5-serving-cert\") pod \"8415de30-2479-4f84-9943-a82906a16be5\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.417763 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-client-ca\") pod \"8415de30-2479-4f84-9943-a82906a16be5\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.417789 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-config\") pod \"8415de30-2479-4f84-9943-a82906a16be5\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.417841 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk8td\" (UniqueName: \"kubernetes.io/projected/8415de30-2479-4f84-9943-a82906a16be5-kube-api-access-wk8td\") pod \"8415de30-2479-4f84-9943-a82906a16be5\" (UID: \"8415de30-2479-4f84-9943-a82906a16be5\") " Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.417997 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-proxy-ca-bundles\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.418028 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6vh7\" (UniqueName: \"kubernetes.io/projected/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-kube-api-access-g6vh7\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.418049 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-config\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: 
\"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.418119 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-client-ca\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.418156 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-serving-cert\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.418210 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254777de-fea2-4d9e-abab-a407bec34126-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.418223 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2jkp\" (UniqueName: \"kubernetes.io/projected/254777de-fea2-4d9e-abab-a407bec34126-kube-api-access-x2jkp\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.420008 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-client-ca" (OuterVolumeSpecName: "client-ca") pod "8415de30-2479-4f84-9943-a82906a16be5" (UID: "8415de30-2479-4f84-9943-a82906a16be5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.425856 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-config" (OuterVolumeSpecName: "config") pod "8415de30-2479-4f84-9943-a82906a16be5" (UID: "8415de30-2479-4f84-9943-a82906a16be5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.428311 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-client-ca\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.428348 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-proxy-ca-bundles\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.428860 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-config\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.429082 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8415de30-2479-4f84-9943-a82906a16be5-kube-api-access-wk8td" (OuterVolumeSpecName: "kube-api-access-wk8td") pod "8415de30-2479-4f84-9943-a82906a16be5" (UID: "8415de30-2479-4f84-9943-a82906a16be5"). InnerVolumeSpecName "kube-api-access-wk8td". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.429292 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-serving-cert\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.429628 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8415de30-2479-4f84-9943-a82906a16be5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8415de30-2479-4f84-9943-a82906a16be5" (UID: "8415de30-2479-4f84-9943-a82906a16be5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.442390 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6vh7\" (UniqueName: \"kubernetes.io/projected/ab05f5af-8af0-4ade-82aa-4cd58e3593b6-kube-api-access-g6vh7\") pod \"controller-manager-7d8646bcf5-dgkl4\" (UID: \"ab05f5af-8af0-4ade-82aa-4cd58e3593b6\") " pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.519937 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk8td\" (UniqueName: \"kubernetes.io/projected/8415de30-2479-4f84-9943-a82906a16be5-kube-api-access-wk8td\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.519982 4605 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8415de30-2479-4f84-9943-a82906a16be5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.519995 4605 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.520009 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8415de30-2479-4f84-9943-a82906a16be5-config\") on node \"crc\" DevicePath \"\"" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.583747 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.807585 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-675f67df86-qjbw4_8415de30-2479-4f84-9943-a82906a16be5/route-controller-manager/0.log" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.807934 4605 generic.go:334] "Generic (PLEG): container finished" podID="8415de30-2479-4f84-9943-a82906a16be5" containerID="56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3" exitCode=255 Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.807976 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" event={"ID":"8415de30-2479-4f84-9943-a82906a16be5","Type":"ContainerDied","Data":"56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3"} Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.808001 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" event={"ID":"8415de30-2479-4f84-9943-a82906a16be5","Type":"ContainerDied","Data":"b8236568528edef9a844baf841801e350c5c339e0b0783db5e60dfd7fbd73d66"} Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.808017 4605 scope.go:117] "RemoveContainer" containerID="56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.808154 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.815809 4605 generic.go:334] "Generic (PLEG): container finished" podID="254777de-fea2-4d9e-abab-a407bec34126" containerID="675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790" exitCode=0 Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.815847 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" event={"ID":"254777de-fea2-4d9e-abab-a407bec34126","Type":"ContainerDied","Data":"675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790"} Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.815870 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" event={"ID":"254777de-fea2-4d9e-abab-a407bec34126","Type":"ContainerDied","Data":"48a296046f4eacc81cf4d88ab1300be8a759c4f49c6d1e20af2afa396af91108"} Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.815916 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-787556f867-zwqxd" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.849961 4605 scope.go:117] "RemoveContainer" containerID="56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3" Oct 01 13:57:12 crc kubenswrapper[4605]: E1001 13:57:12.850312 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3\": container with ID starting with 56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3 not found: ID does not exist" containerID="56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.850341 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3"} err="failed to get container status \"56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3\": rpc error: code = NotFound desc = could not find container \"56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3\": container with ID starting with 56a89262a85ebf01dfa4f61ed18297d66d62e815e3175eb9540316a68151cfa3 not found: ID does not exist" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.850364 4605 scope.go:117] "RemoveContainer" containerID="675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.857629 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4"] Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.862115 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4"] Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.869850 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675f67df86-qjbw4"] Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.878180 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-787556f867-zwqxd"] Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.881790 4605 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-787556f867-zwqxd"] Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.884065 4605 scope.go:117] "RemoveContainer" containerID="675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790" Oct 01 13:57:12 crc kubenswrapper[4605]: E1001 13:57:12.884897 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790\": container with ID starting with 675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790 not found: ID does not exist" containerID="675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790" Oct 01 13:57:12 crc kubenswrapper[4605]: I1001 13:57:12.885046 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790"} err="failed to get container status \"675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790\": rpc error: code = NotFound desc = could not find container \"675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790\": container with ID starting with 675bef6e55fc2e60a453de34192a70e6d3fff1c805fa47764e316e0980e62790 not found: ID does not exist" Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.824477 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" event={"ID":"ab05f5af-8af0-4ade-82aa-4cd58e3593b6","Type":"ContainerStarted","Data":"f80b27741362670326de34c8a9cb44c4c934d65bec057e2330035f24a97dbc1e"} Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.824794 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" event={"ID":"ab05f5af-8af0-4ade-82aa-4cd58e3593b6","Type":"ContainerStarted","Data":"db859d2003082095f8df31e7254d42d75d9d5a33d1581c23e25a2ccf88707ba3"} Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.824812 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.830779 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.845876 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7d8646bcf5-dgkl4" podStartSLOduration=3.845860129 podStartE2EDuration="3.845860129s" podCreationTimestamp="2025-10-01 13:57:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:57:13.842983097 +0000 UTC m=+756.586959305" watchObservedRunningTime="2025-10-01 13:57:13.845860129 +0000 UTC m=+756.589836337" Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.932403 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="254777de-fea2-4d9e-abab-a407bec34126" path="/var/lib/kubelet/pods/254777de-fea2-4d9e-abab-a407bec34126/volumes" Oct 01 13:57:13 crc kubenswrapper[4605]: I1001 13:57:13.932897 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8415de30-2479-4f84-9943-a82906a16be5" path="/var/lib/kubelet/pods/8415de30-2479-4f84-9943-a82906a16be5/volumes" Oct 01 13:57:14 crc 
kubenswrapper[4605]: I1001 13:57:14.941349 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh"] Oct 01 13:57:14 crc kubenswrapper[4605]: E1001 13:57:14.941570 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8415de30-2479-4f84-9943-a82906a16be5" containerName="route-controller-manager" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.941582 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="8415de30-2479-4f84-9943-a82906a16be5" containerName="route-controller-manager" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.941705 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="8415de30-2479-4f84-9943-a82906a16be5" containerName="route-controller-manager" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.942055 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.945823 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.946134 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.946195 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.946152 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.946354 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.946498 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 13:57:14 crc kubenswrapper[4605]: I1001 13:57:14.960705 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh"] Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.054796 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrl9z\" (UniqueName: \"kubernetes.io/projected/068875b1-9042-4afb-a971-831413342b76-kube-api-access-vrl9z\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.054840 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/068875b1-9042-4afb-a971-831413342b76-config\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.054872 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/068875b1-9042-4afb-a971-831413342b76-serving-cert\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.054924 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/068875b1-9042-4afb-a971-831413342b76-client-ca\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.156024 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/068875b1-9042-4afb-a971-831413342b76-client-ca\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.156127 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrl9z\" (UniqueName: \"kubernetes.io/projected/068875b1-9042-4afb-a971-831413342b76-kube-api-access-vrl9z\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.156160 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/068875b1-9042-4afb-a971-831413342b76-config\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.156202 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/068875b1-9042-4afb-a971-831413342b76-serving-cert\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.158585 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/068875b1-9042-4afb-a971-831413342b76-client-ca\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.159475 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/068875b1-9042-4afb-a971-831413342b76-config\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.179267 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/068875b1-9042-4afb-a971-831413342b76-serving-cert\") pod 
\"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.201895 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrl9z\" (UniqueName: \"kubernetes.io/projected/068875b1-9042-4afb-a971-831413342b76-kube-api-access-vrl9z\") pod \"route-controller-manager-5f4d6d7d48-hwkxh\" (UID: \"068875b1-9042-4afb-a971-831413342b76\") " pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:15 crc kubenswrapper[4605]: I1001 13:57:15.262421 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:17 crc kubenswrapper[4605]: I1001 13:57:17.655391 4605 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.430639 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh"] Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.865322 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" event={"ID":"d59274d2-8c52-4997-8e43-aab1e6f5ddd0","Type":"ContainerStarted","Data":"ef3ee8f7a32eb4e4f2a6de10e0af14574f29bf63b9cd1f581a6875f4332267a1"} Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.865549 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.866375 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" event={"ID":"3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c","Type":"ContainerStarted","Data":"d107acc74bd33f032ff9d2265050b2869104868d1354d88cc296caaf461e4622"} Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.868707 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" event={"ID":"068875b1-9042-4afb-a971-831413342b76","Type":"ContainerStarted","Data":"35e266df216d631abb3cd63e3948717ffe4032c652a5c0bc1260aa296f7c4625"} Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.868730 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" event={"ID":"068875b1-9042-4afb-a971-831413342b76","Type":"ContainerStarted","Data":"c58b47464a729295d58618f813ef45dfe744ddb183a6c97735878d982e5ce37e"} Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.869296 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.886679 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" podStartSLOduration=2.47121307 podStartE2EDuration="9.886664862s" podCreationTimestamp="2025-10-01 13:57:10 +0000 UTC" firstStartedPulling="2025-10-01 13:57:11.437928865 +0000 UTC m=+754.181905073" lastFinishedPulling="2025-10-01 13:57:18.853380657 +0000 UTC m=+761.597356865" 
observedRunningTime="2025-10-01 13:57:19.885945524 +0000 UTC m=+762.629921742" watchObservedRunningTime="2025-10-01 13:57:19.886664862 +0000 UTC m=+762.630641070" Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.925235 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" podStartSLOduration=9.925217339 podStartE2EDuration="9.925217339s" podCreationTimestamp="2025-10-01 13:57:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:57:19.920937111 +0000 UTC m=+762.664913319" watchObservedRunningTime="2025-10-01 13:57:19.925217339 +0000 UTC m=+762.669193547" Oct 01 13:57:19 crc kubenswrapper[4605]: I1001 13:57:19.952552 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" podStartSLOduration=2.309566602 podStartE2EDuration="9.952536972s" podCreationTimestamp="2025-10-01 13:57:10 +0000 UTC" firstStartedPulling="2025-10-01 13:57:11.185938887 +0000 UTC m=+753.929915095" lastFinishedPulling="2025-10-01 13:57:18.828909257 +0000 UTC m=+761.572885465" observedRunningTime="2025-10-01 13:57:19.950470369 +0000 UTC m=+762.694446577" watchObservedRunningTime="2025-10-01 13:57:19.952536972 +0000 UTC m=+762.696513190" Oct 01 13:57:20 crc kubenswrapper[4605]: I1001 13:57:20.140864 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f4d6d7d48-hwkxh" Oct 01 13:57:20 crc kubenswrapper[4605]: I1001 13:57:20.521250 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:21 crc kubenswrapper[4605]: I1001 13:57:21.631603 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:57:21 crc kubenswrapper[4605]: I1001 13:57:21.631655 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:57:30 crc kubenswrapper[4605]: I1001 13:57:30.872921 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6b6874df99-s8qkk" Oct 01 13:57:50 crc kubenswrapper[4605]: I1001 13:57:50.522981 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-b7f675c95-dx7nr" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.408521 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-6g94f"] Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.417207 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6"] Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.417657 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.418424 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.421411 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.421508 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-pp454" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.421509 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.421669 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.437129 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6"] Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486238 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr72g\" (UniqueName: \"kubernetes.io/projected/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-kube-api-access-lr72g\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486291 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c7dd5c0-7119-4a6a-838d-a41cc422a655-cert\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486318 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-sockets\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486439 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-conf\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486516 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqsgv\" (UniqueName: \"kubernetes.io/projected/3c7dd5c0-7119-4a6a-838d-a41cc422a655-kube-api-access-vqsgv\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486602 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-startup\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: 
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486690 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-metrics-certs\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.486737 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-metrics\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.505531 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-xrzw2"]
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.506545 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-xrzw2"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.510398 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.510409 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.510497 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.510562 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-2rzv8"
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.527594 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-jlb75"]
Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.528394 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jlb75"
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.531098 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.549721 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-jlb75"] Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587828 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-reloader\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587874 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-metrics-certs\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587897 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-metrics\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587920 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-metrics-certs\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587939 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-metrics-certs\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587957 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr72g\" (UniqueName: \"kubernetes.io/projected/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-kube-api-access-lr72g\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587975 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c7dd5c0-7119-4a6a-838d-a41cc422a655-cert\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.587988 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-sockets\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588009 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" 
(UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-conf\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588025 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbwbf\" (UniqueName: \"kubernetes.io/projected/c9b364bc-86c5-4b79-b38a-d8bbf447be04-kube-api-access-hbwbf\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588045 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/369f1f43-8697-402d-a370-f72c820ddf13-metallb-excludel2\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588127 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqsgv\" (UniqueName: \"kubernetes.io/projected/3c7dd5c0-7119-4a6a-838d-a41cc422a655-kube-api-access-vqsgv\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588161 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588177 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-startup\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588196 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-cert\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588210 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dlkz\" (UniqueName: \"kubernetes.io/projected/369f1f43-8697-402d-a370-f72c820ddf13-kube-api-access-2dlkz\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.588649 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-reloader\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.589482 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-conf\") pod 
\"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: E1001 13:57:51.589568 4605 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Oct 01 13:57:51 crc kubenswrapper[4605]: E1001 13:57:51.589663 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3c7dd5c0-7119-4a6a-838d-a41cc422a655-cert podName:3c7dd5c0-7119-4a6a-838d-a41cc422a655 nodeName:}" failed. No retries permitted until 2025-10-01 13:57:52.08964345 +0000 UTC m=+794.833619658 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3c7dd5c0-7119-4a6a-838d-a41cc422a655-cert") pod "frr-k8s-webhook-server-5478bdb765-bxrk6" (UID: "3c7dd5c0-7119-4a6a-838d-a41cc422a655") : secret "frr-k8s-webhook-server-cert" not found Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.589712 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-metrics\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.589721 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-sockets\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.590388 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-frr-startup\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.609863 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqsgv\" (UniqueName: \"kubernetes.io/projected/3c7dd5c0-7119-4a6a-838d-a41cc422a655-kube-api-access-vqsgv\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.611840 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-metrics-certs\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.614643 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr72g\" (UniqueName: \"kubernetes.io/projected/3476bcf6-f4c4-4db8-8ec9-b567c7d55872-kube-api-access-lr72g\") pod \"frr-k8s-6g94f\" (UID: \"3476bcf6-f4c4-4db8-8ec9-b567c7d55872\") " pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.630735 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.630793 4605 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.630835 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.631372 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"57b43180ba9a3ef7b3e3cb1260253e07ed74787366be1e64f3a3708a8ee8ce49"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.631425 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://57b43180ba9a3ef7b3e3cb1260253e07ed74787366be1e64f3a3708a8ee8ce49" gracePeriod=600 Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.688952 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/369f1f43-8697-402d-a370-f72c820ddf13-metallb-excludel2\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689022 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689047 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dlkz\" (UniqueName: \"kubernetes.io/projected/369f1f43-8697-402d-a370-f72c820ddf13-kube-api-access-2dlkz\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689064 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-cert\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689127 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-metrics-certs\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689143 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-metrics-certs\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " 
pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689187 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbwbf\" (UniqueName: \"kubernetes.io/projected/c9b364bc-86c5-4b79-b38a-d8bbf447be04-kube-api-access-hbwbf\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.689990 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/369f1f43-8697-402d-a370-f72c820ddf13-metallb-excludel2\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: E1001 13:57:51.690326 4605 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 01 13:57:51 crc kubenswrapper[4605]: E1001 13:57:51.690365 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist podName:369f1f43-8697-402d-a370-f72c820ddf13 nodeName:}" failed. No retries permitted until 2025-10-01 13:57:52.190354638 +0000 UTC m=+794.934330836 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist") pod "speaker-xrzw2" (UID: "369f1f43-8697-402d-a370-f72c820ddf13") : secret "metallb-memberlist" not found Oct 01 13:57:51 crc kubenswrapper[4605]: E1001 13:57:51.691414 4605 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 01 13:57:51 crc kubenswrapper[4605]: E1001 13:57:51.691535 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-metrics-certs podName:c9b364bc-86c5-4b79-b38a-d8bbf447be04 nodeName:}" failed. No retries permitted until 2025-10-01 13:57:52.191511847 +0000 UTC m=+794.935488135 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-metrics-certs") pod "controller-5d688f5ffc-jlb75" (UID: "c9b364bc-86c5-4b79-b38a-d8bbf447be04") : secret "controller-certs-secret" not found Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.693399 4605 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.694362 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-metrics-certs\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.705466 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-cert\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.713350 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbwbf\" (UniqueName: \"kubernetes.io/projected/c9b364bc-86c5-4b79-b38a-d8bbf447be04-kube-api-access-hbwbf\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.715615 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dlkz\" (UniqueName: \"kubernetes.io/projected/369f1f43-8697-402d-a370-f72c820ddf13-kube-api-access-2dlkz\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:51 crc kubenswrapper[4605]: I1001 13:57:51.742671 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-6g94f" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.039405 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="57b43180ba9a3ef7b3e3cb1260253e07ed74787366be1e64f3a3708a8ee8ce49" exitCode=0 Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.039479 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"57b43180ba9a3ef7b3e3cb1260253e07ed74787366be1e64f3a3708a8ee8ce49"} Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.039673 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"1f69fd4c8b7ea593079cde275a0913d46f3db4c2d1ad72f22e5ac983a6cab564"} Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.039690 4605 scope.go:117] "RemoveContainer" containerID="ab2e158b8dbbad131cf42220449cc89d2e9b1e83ce456c3ebd1aa3f78648df9e" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.041598 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"001824e295b06daf4284832c0a734e95263c041e3ca8b5aa3bc7b021cc263e05"} Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.093692 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c7dd5c0-7119-4a6a-838d-a41cc422a655-cert\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.098644 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3c7dd5c0-7119-4a6a-838d-a41cc422a655-cert\") pod \"frr-k8s-webhook-server-5478bdb765-bxrk6\" (UID: \"3c7dd5c0-7119-4a6a-838d-a41cc422a655\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.195531 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.195616 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-metrics-certs\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:52 crc kubenswrapper[4605]: E1001 13:57:52.195707 4605 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 01 13:57:52 crc kubenswrapper[4605]: E1001 13:57:52.195781 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist podName:369f1f43-8697-402d-a370-f72c820ddf13 nodeName:}" failed. No retries permitted until 2025-10-01 13:57:53.195763794 +0000 UTC m=+795.939740002 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist") pod "speaker-xrzw2" (UID: "369f1f43-8697-402d-a370-f72c820ddf13") : secret "metallb-memberlist" not found Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.198687 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9b364bc-86c5-4b79-b38a-d8bbf447be04-metrics-certs\") pod \"controller-5d688f5ffc-jlb75\" (UID: \"c9b364bc-86c5-4b79-b38a-d8bbf447be04\") " pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.254333 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x66kh"] Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.255550 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.295366 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x66kh"] Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.298015 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-catalog-content\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.298066 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trpln\" (UniqueName: \"kubernetes.io/projected/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-kube-api-access-trpln\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.298092 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-utilities\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.336049 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.399619 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-catalog-content\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.399667 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trpln\" (UniqueName: \"kubernetes.io/projected/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-kube-api-access-trpln\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.399683 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-utilities\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.400194 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-utilities\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.400418 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-catalog-content\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.425201 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trpln\" (UniqueName: \"kubernetes.io/projected/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-kube-api-access-trpln\") pod \"redhat-marketplace-x66kh\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.468474 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.591540 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.823087 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6"] Oct 01 13:57:52 crc kubenswrapper[4605]: I1001 13:57:52.907525 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-jlb75"] Oct 01 13:57:52 crc kubenswrapper[4605]: W1001 13:57:52.910275 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9b364bc_86c5_4b79_b38a_d8bbf447be04.slice/crio-091cef7ddbd868d95612c18d8e7559f89b486955954d3dc0511e56f92b952248 WatchSource:0}: Error finding container 091cef7ddbd868d95612c18d8e7559f89b486955954d3dc0511e56f92b952248: Status 404 returned error can't find the container with id 091cef7ddbd868d95612c18d8e7559f89b486955954d3dc0511e56f92b952248 Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.030129 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x66kh"] Oct 01 13:57:53 crc kubenswrapper[4605]: W1001 13:57:53.036546 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e7ce887_7ce4_45b3_8b4c_2a33c8deea3f.slice/crio-42b586f9722319956e31c6991ab76986a30f0e6fab41423d8fa9aa2b9ec04d4c WatchSource:0}: Error finding container 42b586f9722319956e31c6991ab76986a30f0e6fab41423d8fa9aa2b9ec04d4c: Status 404 returned error can't find the container with id 42b586f9722319956e31c6991ab76986a30f0e6fab41423d8fa9aa2b9ec04d4c Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.047238 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x66kh" event={"ID":"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f","Type":"ContainerStarted","Data":"42b586f9722319956e31c6991ab76986a30f0e6fab41423d8fa9aa2b9ec04d4c"} Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.051117 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" event={"ID":"3c7dd5c0-7119-4a6a-838d-a41cc422a655","Type":"ContainerStarted","Data":"2a2b1739da093258d50023d7999304550625e0c9f4e80928cd3851c8fcb6998c"} Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.055059 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jlb75" event={"ID":"c9b364bc-86c5-4b79-b38a-d8bbf447be04","Type":"ContainerStarted","Data":"6c36cac9a23ccff790a75c3b331cb231762a18281264a8a406666f23c3bc9191"} Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.055087 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jlb75" event={"ID":"c9b364bc-86c5-4b79-b38a-d8bbf447be04","Type":"ContainerStarted","Data":"091cef7ddbd868d95612c18d8e7559f89b486955954d3dc0511e56f92b952248"} Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.211196 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.217907 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/369f1f43-8697-402d-a370-f72c820ddf13-memberlist\") pod \"speaker-xrzw2\" (UID: \"369f1f43-8697-402d-a370-f72c820ddf13\") " pod="metallb-system/speaker-xrzw2" Oct 01 13:57:53 crc kubenswrapper[4605]: I1001 13:57:53.319766 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-xrzw2" Oct 01 13:57:53 crc kubenswrapper[4605]: W1001 13:57:53.337725 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod369f1f43_8697_402d_a370_f72c820ddf13.slice/crio-5075fedefdad08db9bed5beaed4765195723487bb106b6759a4983b46358ee9e WatchSource:0}: Error finding container 5075fedefdad08db9bed5beaed4765195723487bb106b6759a4983b46358ee9e: Status 404 returned error can't find the container with id 5075fedefdad08db9bed5beaed4765195723487bb106b6759a4983b46358ee9e Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.060735 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jlb75" event={"ID":"c9b364bc-86c5-4b79-b38a-d8bbf447be04","Type":"ContainerStarted","Data":"f8622411b66e618310bb5508d5fb5c704c4d2cd5d909297af5220268c740ce44"} Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.061005 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.062032 4605 generic.go:334] "Generic (PLEG): container finished" podID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerID="3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177" exitCode=0 Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.062081 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x66kh" event={"ID":"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f","Type":"ContainerDied","Data":"3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177"} Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.065033 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xrzw2" event={"ID":"369f1f43-8697-402d-a370-f72c820ddf13","Type":"ContainerStarted","Data":"ca4e572e36bc792fc5434035d2bf6a606613b9dfb0c3531f72e71420d321f905"} Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.065055 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xrzw2" event={"ID":"369f1f43-8697-402d-a370-f72c820ddf13","Type":"ContainerStarted","Data":"7c070d36374443f0f46c8d21ab44d662d608ca556d0df54317207c913bae37f3"} Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.065064 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-xrzw2" event={"ID":"369f1f43-8697-402d-a370-f72c820ddf13","Type":"ContainerStarted","Data":"5075fedefdad08db9bed5beaed4765195723487bb106b6759a4983b46358ee9e"} Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.065230 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-xrzw2" Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.079741 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-jlb75" podStartSLOduration=3.079724814 podStartE2EDuration="3.079724814s" podCreationTimestamp="2025-10-01 13:57:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:57:54.079077157 +0000 UTC m=+796.823053365" 
watchObservedRunningTime="2025-10-01 13:57:54.079724814 +0000 UTC m=+796.823701022" Oct 01 13:57:54 crc kubenswrapper[4605]: I1001 13:57:54.122947 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-xrzw2" podStartSLOduration=3.122931501 podStartE2EDuration="3.122931501s" podCreationTimestamp="2025-10-01 13:57:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:57:54.119561765 +0000 UTC m=+796.863537973" watchObservedRunningTime="2025-10-01 13:57:54.122931501 +0000 UTC m=+796.866907709" Oct 01 13:57:56 crc kubenswrapper[4605]: I1001 13:57:56.088973 4605 generic.go:334] "Generic (PLEG): container finished" podID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerID="c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c" exitCode=0 Oct 01 13:57:56 crc kubenswrapper[4605]: I1001 13:57:56.089451 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x66kh" event={"ID":"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f","Type":"ContainerDied","Data":"c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c"} Oct 01 13:57:57 crc kubenswrapper[4605]: I1001 13:57:57.104657 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x66kh" event={"ID":"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f","Type":"ContainerStarted","Data":"9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525"} Oct 01 13:57:57 crc kubenswrapper[4605]: I1001 13:57:57.123190 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x66kh" podStartSLOduration=2.625454074 podStartE2EDuration="5.123174051s" podCreationTimestamp="2025-10-01 13:57:52 +0000 UTC" firstStartedPulling="2025-10-01 13:57:54.063256765 +0000 UTC m=+796.807232973" lastFinishedPulling="2025-10-01 13:57:56.560976742 +0000 UTC m=+799.304952950" observedRunningTime="2025-10-01 13:57:57.122787471 +0000 UTC m=+799.866763679" watchObservedRunningTime="2025-10-01 13:57:57.123174051 +0000 UTC m=+799.867150259" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.039680 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mkwnf"] Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.040796 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.046772 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mkwnf"] Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.078791 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv646\" (UniqueName: \"kubernetes.io/projected/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-kube-api-access-vv646\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.078833 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-utilities\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.078913 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-catalog-content\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.179967 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-catalog-content\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.180336 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv646\" (UniqueName: \"kubernetes.io/projected/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-kube-api-access-vv646\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.180359 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-utilities\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.180462 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-catalog-content\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.180721 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-utilities\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.222936 4605 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vv646\" (UniqueName: \"kubernetes.io/projected/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-kube-api-access-vv646\") pod \"certified-operators-mkwnf\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:57:58 crc kubenswrapper[4605]: I1001 13:57:58.400556 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:01 crc kubenswrapper[4605]: I1001 13:58:01.108834 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mkwnf"] Oct 01 13:58:01 crc kubenswrapper[4605]: W1001 13:58:01.121426 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac6ce825_737e_4f31_9fc1_54b0cc9ff36d.slice/crio-b9ddc2ff7bfa09d6890fa6a43d728ba8b8b8e0f2ca0b8f7f3b588fea1659de7c WatchSource:0}: Error finding container b9ddc2ff7bfa09d6890fa6a43d728ba8b8b8e0f2ca0b8f7f3b588fea1659de7c: Status 404 returned error can't find the container with id b9ddc2ff7bfa09d6890fa6a43d728ba8b8b8e0f2ca0b8f7f3b588fea1659de7c Oct 01 13:58:01 crc kubenswrapper[4605]: I1001 13:58:01.134555 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mkwnf" event={"ID":"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d","Type":"ContainerStarted","Data":"b9ddc2ff7bfa09d6890fa6a43d728ba8b8b8e0f2ca0b8f7f3b588fea1659de7c"} Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.142258 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" event={"ID":"3c7dd5c0-7119-4a6a-838d-a41cc422a655","Type":"ContainerStarted","Data":"4ddf93eb46e0c250ea15373ee736656b09de06b4de75104c9bb39f8c542dafd1"} Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.143390 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.144391 4605 generic.go:334] "Generic (PLEG): container finished" podID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerID="81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246" exitCode=0 Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.144443 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mkwnf" event={"ID":"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d","Type":"ContainerDied","Data":"81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246"} Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.148554 4605 generic.go:334] "Generic (PLEG): container finished" podID="3476bcf6-f4c4-4db8-8ec9-b567c7d55872" containerID="4cbb108ac021d927e9a3e967a40f9b5a839aff89edd408501003b6f1b56b37cb" exitCode=0 Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.148590 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerDied","Data":"4cbb108ac021d927e9a3e967a40f9b5a839aff89edd408501003b6f1b56b37cb"} Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.165018 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" podStartSLOduration=2.829398625 podStartE2EDuration="11.164996753s" podCreationTimestamp="2025-10-01 13:57:51 +0000 UTC" firstStartedPulling="2025-10-01 
Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.591894 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x66kh"
Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.592345 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x66kh"
Oct 01 13:58:02 crc kubenswrapper[4605]: I1001 13:58:02.628640 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x66kh"
Oct 01 13:58:03 crc kubenswrapper[4605]: I1001 13:58:03.155454 4605 generic.go:334] "Generic (PLEG): container finished" podID="3476bcf6-f4c4-4db8-8ec9-b567c7d55872" containerID="06783e5a753b72c8c3084aff5ccd7a2f44d7f83adc6bd091fa3b1580a4a5cd51" exitCode=0
Oct 01 13:58:03 crc kubenswrapper[4605]: I1001 13:58:03.155544 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerDied","Data":"06783e5a753b72c8c3084aff5ccd7a2f44d7f83adc6bd091fa3b1580a4a5cd51"}
Oct 01 13:58:03 crc kubenswrapper[4605]: I1001 13:58:03.205695 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x66kh"
Oct 01 13:58:03 crc kubenswrapper[4605]: I1001 13:58:03.324406 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-xrzw2"
Oct 01 13:58:03 crc kubenswrapper[4605]: I1001 13:58:03.819331 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x66kh"]
Oct 01 13:58:04 crc kubenswrapper[4605]: I1001 13:58:04.162603 4605 generic.go:334] "Generic (PLEG): container finished" podID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerID="7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5" exitCode=0
Oct 01 13:58:04 crc kubenswrapper[4605]: I1001 13:58:04.162658 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mkwnf" event={"ID":"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d","Type":"ContainerDied","Data":"7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5"}
Oct 01 13:58:04 crc kubenswrapper[4605]: I1001 13:58:04.167566 4605 generic.go:334] "Generic (PLEG): container finished" podID="3476bcf6-f4c4-4db8-8ec9-b567c7d55872" containerID="8a747577dd852d8323501691212d107c8ca4147f0b9efe35654bdbde32d3587f" exitCode=0
Oct 01 13:58:04 crc kubenswrapper[4605]: I1001 13:58:04.168241 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerDied","Data":"8a747577dd852d8323501691212d107c8ca4147f0b9efe35654bdbde32d3587f"}
Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.178634 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"545ab1bf9a990567222d05dec89275cce45ea3f6a26363b332555428228f42db"}
Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.179020 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"2cfc7e5ae4c4c490926ea0fb0d06cb0ffcd74b9879b5ce9d18893ff475222000"}
event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"2cfc7e5ae4c4c490926ea0fb0d06cb0ffcd74b9879b5ce9d18893ff475222000"} Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.179030 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"0df289b4ad2d832aa5aa07380bf9b62eb405422890943d7c62ce7740de56dc53"} Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.179038 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"0c611471edcc3ac323aaffbe321d5115a11f90c0831930dffb4095cae61ce730"} Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.179046 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"ebe792f10de4adcf7574a19d3cede517647105d97db5345e588fbb2c125c0431"} Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.182261 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mkwnf" event={"ID":"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d","Type":"ContainerStarted","Data":"8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b"} Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.182397 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x66kh" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="registry-server" containerID="cri-o://9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525" gracePeriod=2 Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.203189 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mkwnf" podStartSLOduration=4.697703342 podStartE2EDuration="7.203169537s" podCreationTimestamp="2025-10-01 13:57:58 +0000 UTC" firstStartedPulling="2025-10-01 13:58:02.145964919 +0000 UTC m=+804.889941137" lastFinishedPulling="2025-10-01 13:58:04.651431124 +0000 UTC m=+807.395407332" observedRunningTime="2025-10-01 13:58:05.199061843 +0000 UTC m=+807.943038061" watchObservedRunningTime="2025-10-01 13:58:05.203169537 +0000 UTC m=+807.947145755" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.571998 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.691767 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-catalog-content\") pod \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.691901 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trpln\" (UniqueName: \"kubernetes.io/projected/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-kube-api-access-trpln\") pod \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.691970 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-utilities\") pod \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\" (UID: \"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f\") " Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.692774 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-utilities" (OuterVolumeSpecName: "utilities") pod "6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" (UID: "6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.697392 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-kube-api-access-trpln" (OuterVolumeSpecName: "kube-api-access-trpln") pod "6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" (UID: "6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f"). InnerVolumeSpecName "kube-api-access-trpln". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.706353 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" (UID: "6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.793864 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.793903 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:05 crc kubenswrapper[4605]: I1001 13:58:05.793917 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trpln\" (UniqueName: \"kubernetes.io/projected/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f-kube-api-access-trpln\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.191710 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6g94f" event={"ID":"3476bcf6-f4c4-4db8-8ec9-b567c7d55872","Type":"ContainerStarted","Data":"dd38844f0bb0626dd205f8ca9fd01764852e767888ed12b40a7bd105952175c7"} Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.192218 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-6g94f" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.194764 4605 generic.go:334] "Generic (PLEG): container finished" podID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerID="9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525" exitCode=0 Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.194864 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x66kh" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.194881 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x66kh" event={"ID":"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f","Type":"ContainerDied","Data":"9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525"} Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.194935 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x66kh" event={"ID":"6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f","Type":"ContainerDied","Data":"42b586f9722319956e31c6991ab76986a30f0e6fab41423d8fa9aa2b9ec04d4c"} Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.194961 4605 scope.go:117] "RemoveContainer" containerID="9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.220810 4605 scope.go:117] "RemoveContainer" containerID="c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.237951 4605 scope.go:117] "RemoveContainer" containerID="3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.248774 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-6g94f" podStartSLOduration=5.973639622 podStartE2EDuration="15.248756562s" podCreationTimestamp="2025-10-01 13:57:51 +0000 UTC" firstStartedPulling="2025-10-01 13:57:51.85070132 +0000 UTC m=+794.594677528" lastFinishedPulling="2025-10-01 13:58:01.12581826 +0000 UTC m=+803.869794468" observedRunningTime="2025-10-01 13:58:06.248191328 +0000 UTC m=+808.992167536" watchObservedRunningTime="2025-10-01 13:58:06.248756562 
+0000 UTC m=+808.992732760" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.263737 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x66kh"] Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.268477 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x66kh"] Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.269144 4605 scope.go:117] "RemoveContainer" containerID="9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525" Oct 01 13:58:06 crc kubenswrapper[4605]: E1001 13:58:06.269560 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525\": container with ID starting with 9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525 not found: ID does not exist" containerID="9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.269586 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525"} err="failed to get container status \"9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525\": rpc error: code = NotFound desc = could not find container \"9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525\": container with ID starting with 9ddbd68921fd2080c77e149f1ef4e930a81377f4ed760163e2d31cf751d12525 not found: ID does not exist" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.269610 4605 scope.go:117] "RemoveContainer" containerID="c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c" Oct 01 13:58:06 crc kubenswrapper[4605]: E1001 13:58:06.269794 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c\": container with ID starting with c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c not found: ID does not exist" containerID="c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.269826 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c"} err="failed to get container status \"c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c\": rpc error: code = NotFound desc = could not find container \"c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c\": container with ID starting with c6788b8e9a8e2014697eb1ed4b700b6f68ce96977dd7ce93a0ddd87b37c4c37c not found: ID does not exist" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.269839 4605 scope.go:117] "RemoveContainer" containerID="3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177" Oct 01 13:58:06 crc kubenswrapper[4605]: E1001 13:58:06.273167 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177\": container with ID starting with 3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177 not found: ID does not exist" containerID="3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.273193 
4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177"} err="failed to get container status \"3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177\": rpc error: code = NotFound desc = could not find container \"3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177\": container with ID starting with 3ce83d61db50295a2984cdf101ea7dde8751895b966e242d77822369712d6177 not found: ID does not exist" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.743064 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-6g94f" Oct 01 13:58:06 crc kubenswrapper[4605]: I1001 13:58:06.782297 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-6g94f" Oct 01 13:58:07 crc kubenswrapper[4605]: I1001 13:58:07.935060 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" path="/var/lib/kubelet/pods/6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f/volumes" Oct 01 13:58:08 crc kubenswrapper[4605]: I1001 13:58:08.401657 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:08 crc kubenswrapper[4605]: I1001 13:58:08.401762 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:08 crc kubenswrapper[4605]: I1001 13:58:08.458738 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:09 crc kubenswrapper[4605]: I1001 13:58:09.279325 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:10 crc kubenswrapper[4605]: I1001 13:58:10.420357 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mkwnf"] Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.227991 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mkwnf" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="registry-server" containerID="cri-o://8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b" gracePeriod=2 Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.614827 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.768528 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv646\" (UniqueName: \"kubernetes.io/projected/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-kube-api-access-vv646\") pod \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.768620 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-catalog-content\") pod \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.768644 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-utilities\") pod \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\" (UID: \"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d\") " Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.771883 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-utilities" (OuterVolumeSpecName: "utilities") pod "ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" (UID: "ac6ce825-737e-4f31-9fc1-54b0cc9ff36d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.772352 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.783746 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-kube-api-access-vv646" (OuterVolumeSpecName: "kube-api-access-vv646") pod "ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" (UID: "ac6ce825-737e-4f31-9fc1-54b0cc9ff36d"). InnerVolumeSpecName "kube-api-access-vv646". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.824157 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" (UID: "ac6ce825-737e-4f31-9fc1-54b0cc9ff36d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.873470 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv646\" (UniqueName: \"kubernetes.io/projected/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-kube-api-access-vv646\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:11 crc kubenswrapper[4605]: I1001 13:58:11.873517 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.236057 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mkwnf" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.236123 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mkwnf" event={"ID":"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d","Type":"ContainerDied","Data":"8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b"} Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.236161 4605 scope.go:117] "RemoveContainer" containerID="8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.235923 4605 generic.go:334] "Generic (PLEG): container finished" podID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerID="8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b" exitCode=0 Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.237733 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mkwnf" event={"ID":"ac6ce825-737e-4f31-9fc1-54b0cc9ff36d","Type":"ContainerDied","Data":"b9ddc2ff7bfa09d6890fa6a43d728ba8b8b8e0f2ca0b8f7f3b588fea1659de7c"} Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.256312 4605 scope.go:117] "RemoveContainer" containerID="7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.259170 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mkwnf"] Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.263310 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mkwnf"] Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.283678 4605 scope.go:117] "RemoveContainer" containerID="81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.305579 4605 scope.go:117] "RemoveContainer" containerID="8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b" Oct 01 13:58:12 crc kubenswrapper[4605]: E1001 13:58:12.305985 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b\": container with ID starting with 8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b not found: ID does not exist" containerID="8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.306019 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b"} err="failed to get container status \"8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b\": rpc error: code = NotFound desc = could not find container \"8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b\": container with ID starting with 8d76054a0855bc6212b2c04191b3e688b3f6efbdb3d17885aeb320fb1e5f249b not found: ID does not exist" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.306040 4605 scope.go:117] "RemoveContainer" containerID="7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5" Oct 01 13:58:12 crc kubenswrapper[4605]: E1001 13:58:12.306374 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5\": container with ID 
starting with 7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5 not found: ID does not exist" containerID="7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.306396 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5"} err="failed to get container status \"7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5\": rpc error: code = NotFound desc = could not find container \"7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5\": container with ID starting with 7170d9fefa54643bc31dae76e84f4d1b1216fc0f26690312b0d264df574ed8c5 not found: ID does not exist" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.306409 4605 scope.go:117] "RemoveContainer" containerID="81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246" Oct 01 13:58:12 crc kubenswrapper[4605]: E1001 13:58:12.306695 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246\": container with ID starting with 81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246 not found: ID does not exist" containerID="81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.306728 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246"} err="failed to get container status \"81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246\": rpc error: code = NotFound desc = could not find container \"81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246\": container with ID starting with 81a56d1d6996b0990c2bc5d6a4557c380646ff543d46d45e956aac7451885246 not found: ID does not exist" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.343907 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-bxrk6" Oct 01 13:58:12 crc kubenswrapper[4605]: I1001 13:58:12.473389 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-jlb75" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.225553 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-8vx9r"] Oct 01 13:58:13 crc kubenswrapper[4605]: E1001 13:58:13.226062 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="registry-server" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226073 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="registry-server" Oct 01 13:58:13 crc kubenswrapper[4605]: E1001 13:58:13.226083 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="extract-utilities" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226101 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="extract-utilities" Oct 01 13:58:13 crc kubenswrapper[4605]: E1001 13:58:13.226116 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" 
containerName="extract-content" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226122 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="extract-content" Oct 01 13:58:13 crc kubenswrapper[4605]: E1001 13:58:13.226131 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="registry-server" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226137 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="registry-server" Oct 01 13:58:13 crc kubenswrapper[4605]: E1001 13:58:13.226149 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="extract-content" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226155 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="extract-content" Oct 01 13:58:13 crc kubenswrapper[4605]: E1001 13:58:13.226166 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="extract-utilities" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226171 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="extract-utilities" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226279 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" containerName="registry-server" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226289 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e7ce887-7ce4-45b3-8b4c-2a33c8deea3f" containerName="registry-server" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.226732 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.228977 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.229348 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.229517 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-m94f6" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.237528 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8vx9r"] Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.294985 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5g4z\" (UniqueName: \"kubernetes.io/projected/c2be49ad-0f11-4479-b725-29854a0c1b8f-kube-api-access-w5g4z\") pod \"openstack-operator-index-8vx9r\" (UID: \"c2be49ad-0f11-4479-b725-29854a0c1b8f\") " pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.396634 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5g4z\" (UniqueName: \"kubernetes.io/projected/c2be49ad-0f11-4479-b725-29854a0c1b8f-kube-api-access-w5g4z\") pod \"openstack-operator-index-8vx9r\" (UID: \"c2be49ad-0f11-4479-b725-29854a0c1b8f\") " pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.421066 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5g4z\" (UniqueName: \"kubernetes.io/projected/c2be49ad-0f11-4479-b725-29854a0c1b8f-kube-api-access-w5g4z\") pod \"openstack-operator-index-8vx9r\" (UID: \"c2be49ad-0f11-4479-b725-29854a0c1b8f\") " pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.544320 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:13 crc kubenswrapper[4605]: I1001 13:58:13.933313 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac6ce825-737e-4f31-9fc1-54b0cc9ff36d" path="/var/lib/kubelet/pods/ac6ce825-737e-4f31-9fc1-54b0cc9ff36d/volumes" Oct 01 13:58:14 crc kubenswrapper[4605]: I1001 13:58:14.007400 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8vx9r"] Oct 01 13:58:14 crc kubenswrapper[4605]: I1001 13:58:14.253452 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8vx9r" event={"ID":"c2be49ad-0f11-4479-b725-29854a0c1b8f","Type":"ContainerStarted","Data":"9d5826f6f381c3f745fcc82a3e666e8e08c7d926e836e1c0795094cb0c87bf74"} Oct 01 13:58:16 crc kubenswrapper[4605]: I1001 13:58:16.269676 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8vx9r" event={"ID":"c2be49ad-0f11-4479-b725-29854a0c1b8f","Type":"ContainerStarted","Data":"a2021f31724fed3fc065fab263a65819c1dc0521fc10c4ce62f4181d43d33472"} Oct 01 13:58:16 crc kubenswrapper[4605]: I1001 13:58:16.292458 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-8vx9r" podStartSLOduration=1.356207156 podStartE2EDuration="3.292437483s" podCreationTimestamp="2025-10-01 13:58:13 +0000 UTC" firstStartedPulling="2025-10-01 13:58:14.019050142 +0000 UTC m=+816.763026360" lastFinishedPulling="2025-10-01 13:58:15.955280479 +0000 UTC m=+818.699256687" observedRunningTime="2025-10-01 13:58:16.28761585 +0000 UTC m=+819.031592058" watchObservedRunningTime="2025-10-01 13:58:16.292437483 +0000 UTC m=+819.036413701" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.236240 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-46ngz"] Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.247317 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.267421 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-46ngz"] Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.379574 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-catalog-content\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.379625 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-utilities\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.379708 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljlrx\" (UniqueName: \"kubernetes.io/projected/96610e4e-8a1c-4a4e-92ff-166271766e99-kube-api-access-ljlrx\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.480802 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljlrx\" (UniqueName: \"kubernetes.io/projected/96610e4e-8a1c-4a4e-92ff-166271766e99-kube-api-access-ljlrx\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.480923 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-catalog-content\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.480955 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-utilities\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.481421 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-catalog-content\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.481546 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-utilities\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.505580 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ljlrx\" (UniqueName: \"kubernetes.io/projected/96610e4e-8a1c-4a4e-92ff-166271766e99-kube-api-access-ljlrx\") pod \"redhat-operators-46ngz\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:20 crc kubenswrapper[4605]: I1001 13:58:20.583937 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:21 crc kubenswrapper[4605]: I1001 13:58:21.002461 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-46ngz"] Oct 01 13:58:21 crc kubenswrapper[4605]: I1001 13:58:21.302441 4605 generic.go:334] "Generic (PLEG): container finished" podID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerID="891615b1a922a12aad398310590cf23b716e48f4286e4680601a27f11c7dc2c7" exitCode=0 Oct 01 13:58:21 crc kubenswrapper[4605]: I1001 13:58:21.302497 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerDied","Data":"891615b1a922a12aad398310590cf23b716e48f4286e4680601a27f11c7dc2c7"} Oct 01 13:58:21 crc kubenswrapper[4605]: I1001 13:58:21.302560 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerStarted","Data":"19d4780097685a65362a91f3a792514cc162eb5da4b60229ae7a817ba537d606"} Oct 01 13:58:21 crc kubenswrapper[4605]: I1001 13:58:21.748928 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-6g94f" Oct 01 13:58:22 crc kubenswrapper[4605]: I1001 13:58:22.317478 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerStarted","Data":"17ed0ca3947172df65913f1bac4e9e35e218a7b27340d150ff21d547cfe7b3b4"} Oct 01 13:58:23 crc kubenswrapper[4605]: I1001 13:58:23.544810 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:23 crc kubenswrapper[4605]: I1001 13:58:23.544855 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:23 crc kubenswrapper[4605]: I1001 13:58:23.572822 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:24 crc kubenswrapper[4605]: I1001 13:58:24.332052 4605 generic.go:334] "Generic (PLEG): container finished" podID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerID="17ed0ca3947172df65913f1bac4e9e35e218a7b27340d150ff21d547cfe7b3b4" exitCode=0 Oct 01 13:58:24 crc kubenswrapper[4605]: I1001 13:58:24.332138 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerDied","Data":"17ed0ca3947172df65913f1bac4e9e35e218a7b27340d150ff21d547cfe7b3b4"} Oct 01 13:58:24 crc kubenswrapper[4605]: I1001 13:58:24.371358 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-8vx9r" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.260560 4605 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"] Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.261801 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.264705 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-d4tft" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.278997 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"] Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.341085 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerStarted","Data":"f74ef1e7eb6598bab1a28db7d45d0beeb1bd0000b5794d2a02eacd408c76c28d"} Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.364143 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-46ngz" podStartSLOduration=1.865256782 podStartE2EDuration="5.364127486s" podCreationTimestamp="2025-10-01 13:58:20 +0000 UTC" firstStartedPulling="2025-10-01 13:58:21.303954066 +0000 UTC m=+824.047930274" lastFinishedPulling="2025-10-01 13:58:24.80282477 +0000 UTC m=+827.546800978" observedRunningTime="2025-10-01 13:58:25.361883109 +0000 UTC m=+828.105859327" watchObservedRunningTime="2025-10-01 13:58:25.364127486 +0000 UTC m=+828.108103694" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.445985 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-bundle\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.446058 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnvn2\" (UniqueName: \"kubernetes.io/projected/67624804-868e-4db5-9eb8-aaec10c2a4ba-kube-api-access-jnvn2\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.446110 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-util\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.546794 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-bundle\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" 
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.546856 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnvn2\" (UniqueName: \"kubernetes.io/projected/67624804-868e-4db5-9eb8-aaec10c2a4ba-kube-api-access-jnvn2\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.546885 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-util\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.547324 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-util\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.547456 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-bundle\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.564899 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnvn2\" (UniqueName: \"kubernetes.io/projected/67624804-868e-4db5-9eb8-aaec10c2a4ba-kube-api-access-jnvn2\") pod \"1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") " pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.576618 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:25 crc kubenswrapper[4605]: I1001 13:58:25.805750 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"]
Oct 01 13:58:25 crc kubenswrapper[4605]: W1001 13:58:25.817244 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67624804_868e_4db5_9eb8_aaec10c2a4ba.slice/crio-f52692a80fbb5f6f42c688c1f63a4d87b70309efd84d2c3ceb884879ca07c713 WatchSource:0}: Error finding container f52692a80fbb5f6f42c688c1f63a4d87b70309efd84d2c3ceb884879ca07c713: Status 404 returned error can't find the container with id f52692a80fbb5f6f42c688c1f63a4d87b70309efd84d2c3ceb884879ca07c713
Oct 01 13:58:26 crc kubenswrapper[4605]: I1001 13:58:26.348125 4605 generic.go:334] "Generic (PLEG): container finished" podID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerID="eed07c96fd09ea870548cdf40e95c39e4e9268f9d10778cfb5143faeea38087a" exitCode=0
Oct 01 13:58:26 crc kubenswrapper[4605]: I1001 13:58:26.348225 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" event={"ID":"67624804-868e-4db5-9eb8-aaec10c2a4ba","Type":"ContainerDied","Data":"eed07c96fd09ea870548cdf40e95c39e4e9268f9d10778cfb5143faeea38087a"}
Oct 01 13:58:26 crc kubenswrapper[4605]: I1001 13:58:26.349359 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" event={"ID":"67624804-868e-4db5-9eb8-aaec10c2a4ba","Type":"ContainerStarted","Data":"f52692a80fbb5f6f42c688c1f63a4d87b70309efd84d2c3ceb884879ca07c713"}
Oct 01 13:58:27 crc kubenswrapper[4605]: I1001 13:58:27.357974 4605 generic.go:334] "Generic (PLEG): container finished" podID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerID="f6584f79dafdf7fbee07aa1359582af5a4f77ee5cc58a6e3a34eda2061a5635a" exitCode=0
Oct 01 13:58:27 crc kubenswrapper[4605]: I1001 13:58:27.358033 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" event={"ID":"67624804-868e-4db5-9eb8-aaec10c2a4ba","Type":"ContainerDied","Data":"f6584f79dafdf7fbee07aa1359582af5a4f77ee5cc58a6e3a34eda2061a5635a"}
Oct 01 13:58:28 crc kubenswrapper[4605]: I1001 13:58:28.366824 4605 generic.go:334] "Generic (PLEG): container finished" podID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerID="3a51fe96b9fa1ba324d435a6566f2548b269ee19a5b853115026f191f1539517" exitCode=0
Oct 01 13:58:28 crc kubenswrapper[4605]: I1001 13:58:28.366911 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" event={"ID":"67624804-868e-4db5-9eb8-aaec10c2a4ba","Type":"ContainerDied","Data":"3a51fe96b9fa1ba324d435a6566f2548b269ee19a5b853115026f191f1539517"}
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.035909 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v2kws"]
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.037442 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.055994 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2kws"]
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.189686 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-utilities\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.189765 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x78d\" (UniqueName: \"kubernetes.io/projected/d6a45c94-45c6-4001-8bc0-efec3e892479-kube-api-access-2x78d\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.189820 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-catalog-content\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.290705 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-catalog-content\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.290781 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-utilities\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.290820 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x78d\" (UniqueName: \"kubernetes.io/projected/d6a45c94-45c6-4001-8bc0-efec3e892479-kube-api-access-2x78d\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.291316 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-utilities\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.291368 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-catalog-content\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.312644 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x78d\" (UniqueName: \"kubernetes.io/projected/d6a45c94-45c6-4001-8bc0-efec3e892479-kube-api-access-2x78d\") pod \"community-operators-v2kws\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.350536 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v2kws"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.755015 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.873457 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2kws"]
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.899524 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-bundle\") pod \"67624804-868e-4db5-9eb8-aaec10c2a4ba\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") "
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.899605 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnvn2\" (UniqueName: \"kubernetes.io/projected/67624804-868e-4db5-9eb8-aaec10c2a4ba-kube-api-access-jnvn2\") pod \"67624804-868e-4db5-9eb8-aaec10c2a4ba\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") "
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.899621 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-util\") pod \"67624804-868e-4db5-9eb8-aaec10c2a4ba\" (UID: \"67624804-868e-4db5-9eb8-aaec10c2a4ba\") "
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.900179 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-bundle" (OuterVolumeSpecName: "bundle") pod "67624804-868e-4db5-9eb8-aaec10c2a4ba" (UID: "67624804-868e-4db5-9eb8-aaec10c2a4ba"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.911817 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-util" (OuterVolumeSpecName: "util") pod "67624804-868e-4db5-9eb8-aaec10c2a4ba" (UID: "67624804-868e-4db5-9eb8-aaec10c2a4ba"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 13:58:29 crc kubenswrapper[4605]: I1001 13:58:29.912056 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67624804-868e-4db5-9eb8-aaec10c2a4ba-kube-api-access-jnvn2" (OuterVolumeSpecName: "kube-api-access-jnvn2") pod "67624804-868e-4db5-9eb8-aaec10c2a4ba" (UID: "67624804-868e-4db5-9eb8-aaec10c2a4ba"). InnerVolumeSpecName "kube-api-access-jnvn2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.001250 4605 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.001362 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnvn2\" (UniqueName: \"kubernetes.io/projected/67624804-868e-4db5-9eb8-aaec10c2a4ba-kube-api-access-jnvn2\") on node \"crc\" DevicePath \"\""
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.001425 4605 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/67624804-868e-4db5-9eb8-aaec10c2a4ba-util\") on node \"crc\" DevicePath \"\""
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.379573 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd" event={"ID":"67624804-868e-4db5-9eb8-aaec10c2a4ba","Type":"ContainerDied","Data":"f52692a80fbb5f6f42c688c1f63a4d87b70309efd84d2c3ceb884879ca07c713"}
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.379617 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f52692a80fbb5f6f42c688c1f63a4d87b70309efd84d2c3ceb884879ca07c713"
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.379692 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd"
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.383182 4605 generic.go:334] "Generic (PLEG): container finished" podID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerID="1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661" exitCode=0
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.383244 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerDied","Data":"1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661"}
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.383355 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerStarted","Data":"b9881b083e0135168d9a042e6c2b740256d2c1d6b8d4c84ae2160b7153b0caec"}
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.585175 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-46ngz"
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.585238 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-46ngz"
Oct 01 13:58:30 crc kubenswrapper[4605]: I1001 13:58:30.642514 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-46ngz"
Oct 01 13:58:31 crc kubenswrapper[4605]: I1001 13:58:31.392187 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerStarted","Data":"04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a"}
Oct 01 13:58:31 crc kubenswrapper[4605]: I1001 13:58:31.459506 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-46ngz"
Oct 01 13:58:32 crc kubenswrapper[4605]: I1001 13:58:32.399772 4605 generic.go:334] "Generic (PLEG): container finished" podID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerID="04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a" exitCode=0
Oct 01 13:58:32 crc kubenswrapper[4605]: I1001 13:58:32.399869 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerDied","Data":"04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a"}
Oct 01 13:58:33 crc kubenswrapper[4605]: I1001 13:58:33.411385 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerStarted","Data":"543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b"}
Oct 01 13:58:33 crc kubenswrapper[4605]: I1001 13:58:33.453768 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v2kws" podStartSLOduration=1.958152964 podStartE2EDuration="4.453724836s" podCreationTimestamp="2025-10-01 13:58:29 +0000 UTC" firstStartedPulling="2025-10-01 13:58:30.386148256 +0000 UTC m=+833.130124504" lastFinishedPulling="2025-10-01 13:58:32.881720168 +0000 UTC m=+835.625696376" observedRunningTime="2025-10-01 13:58:33.449078188 +0000 UTC m=+836.193054396" watchObservedRunningTime="2025-10-01 13:58:33.453724836 +0000 UTC m=+836.197701034"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.002708 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"]
Oct 01 13:58:34 crc kubenswrapper[4605]: E1001 13:58:34.003287 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="extract"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.003309 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="extract"
Oct 01 13:58:34 crc kubenswrapper[4605]: E1001 13:58:34.003326 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="pull"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.003333 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="pull"
Oct 01 13:58:34 crc kubenswrapper[4605]: E1001 13:58:34.003352 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="util"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.003360 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="util"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.003480 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="67624804-868e-4db5-9eb8-aaec10c2a4ba" containerName="extract"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.004187 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.006850 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-v5x8f"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.031661 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"]
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.154540 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwp5q\" (UniqueName: \"kubernetes.io/projected/471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a-kube-api-access-nwp5q\") pod \"openstack-operator-controller-operator-5b5f977c9c-ztrlb\" (UID: \"471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a\") " pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.255893 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwp5q\" (UniqueName: \"kubernetes.io/projected/471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a-kube-api-access-nwp5q\") pod \"openstack-operator-controller-operator-5b5f977c9c-ztrlb\" (UID: \"471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a\") " pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.297864 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwp5q\" (UniqueName: \"kubernetes.io/projected/471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a-kube-api-access-nwp5q\") pod \"openstack-operator-controller-operator-5b5f977c9c-ztrlb\" (UID: \"471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a\") " pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"
Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.319723 4605 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" Oct 01 13:58:34 crc kubenswrapper[4605]: I1001 13:58:34.671702 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb"] Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.020734 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-46ngz"] Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.020930 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-46ngz" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="registry-server" containerID="cri-o://f74ef1e7eb6598bab1a28db7d45d0beeb1bd0000b5794d2a02eacd408c76c28d" gracePeriod=2 Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.433357 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" event={"ID":"471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a","Type":"ContainerStarted","Data":"5b3e8a65dc94a4b69d2ee3086e6924a304021b607594e32ede8bafe27d6e1b97"} Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.435783 4605 generic.go:334] "Generic (PLEG): container finished" podID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerID="f74ef1e7eb6598bab1a28db7d45d0beeb1bd0000b5794d2a02eacd408c76c28d" exitCode=0 Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.435812 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerDied","Data":"f74ef1e7eb6598bab1a28db7d45d0beeb1bd0000b5794d2a02eacd408c76c28d"} Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.526301 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.676111 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljlrx\" (UniqueName: \"kubernetes.io/projected/96610e4e-8a1c-4a4e-92ff-166271766e99-kube-api-access-ljlrx\") pod \"96610e4e-8a1c-4a4e-92ff-166271766e99\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.676230 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-catalog-content\") pod \"96610e4e-8a1c-4a4e-92ff-166271766e99\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.676250 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-utilities\") pod \"96610e4e-8a1c-4a4e-92ff-166271766e99\" (UID: \"96610e4e-8a1c-4a4e-92ff-166271766e99\") " Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.678774 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-utilities" (OuterVolumeSpecName: "utilities") pod "96610e4e-8a1c-4a4e-92ff-166271766e99" (UID: "96610e4e-8a1c-4a4e-92ff-166271766e99"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.684037 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96610e4e-8a1c-4a4e-92ff-166271766e99-kube-api-access-ljlrx" (OuterVolumeSpecName: "kube-api-access-ljlrx") pod "96610e4e-8a1c-4a4e-92ff-166271766e99" (UID: "96610e4e-8a1c-4a4e-92ff-166271766e99"). InnerVolumeSpecName "kube-api-access-ljlrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.768145 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "96610e4e-8a1c-4a4e-92ff-166271766e99" (UID: "96610e4e-8a1c-4a4e-92ff-166271766e99"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.777420 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.777454 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96610e4e-8a1c-4a4e-92ff-166271766e99-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:35 crc kubenswrapper[4605]: I1001 13:58:35.777465 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljlrx\" (UniqueName: \"kubernetes.io/projected/96610e4e-8a1c-4a4e-92ff-166271766e99-kube-api-access-ljlrx\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.444347 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46ngz" event={"ID":"96610e4e-8a1c-4a4e-92ff-166271766e99","Type":"ContainerDied","Data":"19d4780097685a65362a91f3a792514cc162eb5da4b60229ae7a817ba537d606"} Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.444404 4605 scope.go:117] "RemoveContainer" containerID="f74ef1e7eb6598bab1a28db7d45d0beeb1bd0000b5794d2a02eacd408c76c28d" Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.444555 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-46ngz" Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.466610 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-46ngz"] Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.473967 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-46ngz"] Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.477505 4605 scope.go:117] "RemoveContainer" containerID="17ed0ca3947172df65913f1bac4e9e35e218a7b27340d150ff21d547cfe7b3b4" Oct 01 13:58:36 crc kubenswrapper[4605]: I1001 13:58:36.497413 4605 scope.go:117] "RemoveContainer" containerID="891615b1a922a12aad398310590cf23b716e48f4286e4680601a27f11c7dc2c7" Oct 01 13:58:37 crc kubenswrapper[4605]: I1001 13:58:37.946709 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" path="/var/lib/kubelet/pods/96610e4e-8a1c-4a4e-92ff-166271766e99/volumes" Oct 01 13:58:39 crc kubenswrapper[4605]: I1001 13:58:39.351154 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v2kws" Oct 01 13:58:39 crc kubenswrapper[4605]: I1001 13:58:39.351432 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v2kws" Oct 01 13:58:39 crc kubenswrapper[4605]: I1001 13:58:39.390164 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v2kws" Oct 01 13:58:39 crc kubenswrapper[4605]: I1001 13:58:39.503882 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v2kws" Oct 01 13:58:41 crc kubenswrapper[4605]: I1001 13:58:41.475394 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" event={"ID":"471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a","Type":"ContainerStarted","Data":"bde9b74554c6c10dfc928b05280e1137b808604acf01eb9a89110d622eea007d"} Oct 01 13:58:41 crc kubenswrapper[4605]: I1001 13:58:41.621864 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v2kws"] Oct 01 13:58:41 crc kubenswrapper[4605]: I1001 13:58:41.622522 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v2kws" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="registry-server" containerID="cri-o://543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b" gracePeriod=2 Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.089382 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2kws" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.275276 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-utilities\") pod \"d6a45c94-45c6-4001-8bc0-efec3e892479\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.275352 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-catalog-content\") pod \"d6a45c94-45c6-4001-8bc0-efec3e892479\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.275397 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x78d\" (UniqueName: \"kubernetes.io/projected/d6a45c94-45c6-4001-8bc0-efec3e892479-kube-api-access-2x78d\") pod \"d6a45c94-45c6-4001-8bc0-efec3e892479\" (UID: \"d6a45c94-45c6-4001-8bc0-efec3e892479\") " Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.276750 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-utilities" (OuterVolumeSpecName: "utilities") pod "d6a45c94-45c6-4001-8bc0-efec3e892479" (UID: "d6a45c94-45c6-4001-8bc0-efec3e892479"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.285824 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6a45c94-45c6-4001-8bc0-efec3e892479-kube-api-access-2x78d" (OuterVolumeSpecName: "kube-api-access-2x78d") pod "d6a45c94-45c6-4001-8bc0-efec3e892479" (UID: "d6a45c94-45c6-4001-8bc0-efec3e892479"). InnerVolumeSpecName "kube-api-access-2x78d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.346120 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6a45c94-45c6-4001-8bc0-efec3e892479" (UID: "d6a45c94-45c6-4001-8bc0-efec3e892479"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.376518 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.376554 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6a45c94-45c6-4001-8bc0-efec3e892479-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.376565 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x78d\" (UniqueName: \"kubernetes.io/projected/d6a45c94-45c6-4001-8bc0-efec3e892479-kube-api-access-2x78d\") on node \"crc\" DevicePath \"\"" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.484191 4605 generic.go:334] "Generic (PLEG): container finished" podID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerID="543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b" exitCode=0 Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.484223 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerDied","Data":"543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b"} Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.484245 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2kws" event={"ID":"d6a45c94-45c6-4001-8bc0-efec3e892479","Type":"ContainerDied","Data":"b9881b083e0135168d9a042e6c2b740256d2c1d6b8d4c84ae2160b7153b0caec"} Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.484246 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2kws" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.484261 4605 scope.go:117] "RemoveContainer" containerID="543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.535238 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v2kws"] Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.538774 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v2kws"] Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.748634 4605 scope.go:117] "RemoveContainer" containerID="04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.785920 4605 scope.go:117] "RemoveContainer" containerID="1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.802317 4605 scope.go:117] "RemoveContainer" containerID="543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b" Oct 01 13:58:42 crc kubenswrapper[4605]: E1001 13:58:42.802619 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b\": container with ID starting with 543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b not found: ID does not exist" containerID="543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.802654 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b"} err="failed to get container status \"543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b\": rpc error: code = NotFound desc = could not find container \"543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b\": container with ID starting with 543034686f494b1a55d60c57e22971a157953e59e61fdd262b931106808cd33b not found: ID does not exist" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.802675 4605 scope.go:117] "RemoveContainer" containerID="04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a" Oct 01 13:58:42 crc kubenswrapper[4605]: E1001 13:58:42.802972 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a\": container with ID starting with 04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a not found: ID does not exist" containerID="04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.802993 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a"} err="failed to get container status \"04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a\": rpc error: code = NotFound desc = could not find container \"04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a\": container with ID starting with 04c5872811f6ec08cb54c447f29c99ef5d00e086a9db6f41131f24f7ce10a26a not found: ID does not exist" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.803008 4605 scope.go:117] "RemoveContainer" 
containerID="1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661" Oct 01 13:58:42 crc kubenswrapper[4605]: E1001 13:58:42.803318 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661\": container with ID starting with 1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661 not found: ID does not exist" containerID="1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661" Oct 01 13:58:42 crc kubenswrapper[4605]: I1001 13:58:42.803341 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661"} err="failed to get container status \"1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661\": rpc error: code = NotFound desc = could not find container \"1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661\": container with ID starting with 1243dbeaa039f6421384d30156206ec296c7cdaf10a10eaeacd79b31f8eb7661 not found: ID does not exist" Oct 01 13:58:43 crc kubenswrapper[4605]: I1001 13:58:43.492231 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" event={"ID":"471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a","Type":"ContainerStarted","Data":"49fdd768ae6b04307599756cae9f019d1f8cbddbe430d71556cdaebb7107a95f"} Oct 01 13:58:43 crc kubenswrapper[4605]: I1001 13:58:43.492395 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" Oct 01 13:58:43 crc kubenswrapper[4605]: I1001 13:58:43.532781 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" podStartSLOduration=2.424594806 podStartE2EDuration="10.532765614s" podCreationTimestamp="2025-10-01 13:58:33 +0000 UTC" firstStartedPulling="2025-10-01 13:58:34.695237757 +0000 UTC m=+837.439213965" lastFinishedPulling="2025-10-01 13:58:42.803408555 +0000 UTC m=+845.547384773" observedRunningTime="2025-10-01 13:58:43.525365526 +0000 UTC m=+846.269341734" watchObservedRunningTime="2025-10-01 13:58:43.532765614 +0000 UTC m=+846.276741822" Oct 01 13:58:43 crc kubenswrapper[4605]: I1001 13:58:43.938873 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" path="/var/lib/kubelet/pods/d6a45c94-45c6-4001-8bc0-efec3e892479/volumes" Oct 01 13:58:54 crc kubenswrapper[4605]: I1001 13:58:54.323699 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5b5f977c9c-ztrlb" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.777359 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl"] Oct 01 13:59:10 crc kubenswrapper[4605]: E1001 13:59:10.778040 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="registry-server" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778051 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="registry-server" Oct 01 13:59:10 crc kubenswrapper[4605]: E1001 13:59:10.778066 4605 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="registry-server" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778072 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="registry-server" Oct 01 13:59:10 crc kubenswrapper[4605]: E1001 13:59:10.778084 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="extract-utilities" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778105 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="extract-utilities" Oct 01 13:59:10 crc kubenswrapper[4605]: E1001 13:59:10.778112 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="extract-utilities" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778118 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="extract-utilities" Oct 01 13:59:10 crc kubenswrapper[4605]: E1001 13:59:10.778128 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="extract-content" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778133 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="extract-content" Oct 01 13:59:10 crc kubenswrapper[4605]: E1001 13:59:10.778140 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="extract-content" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778146 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="extract-content" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778250 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6a45c94-45c6-4001-8bc0-efec3e892479" containerName="registry-server" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778262 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="96610e4e-8a1c-4a4e-92ff-166271766e99" containerName="registry-server" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.778827 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.781293 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-gtjzb" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.795186 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.796060 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.801247 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.803331 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.806671 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-hbw8w" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.811009 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-rdqzz" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.832227 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.835997 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl2ws\" (UniqueName: \"kubernetes.io/projected/1c52463d-7f43-422b-b6f2-071553e4efb1-kube-api-access-cl2ws\") pod \"barbican-operator-controller-manager-6ff8b75857-td7cl\" (UID: \"1c52463d-7f43-422b-b6f2-071553e4efb1\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.836052 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lgsp\" (UniqueName: \"kubernetes.io/projected/2f85ca51-dac6-464b-8da5-b2b35511c3a7-kube-api-access-5lgsp\") pod \"designate-operator-controller-manager-84f4f7b77b-k4j9s\" (UID: \"2f85ca51-dac6-464b-8da5-b2b35511c3a7\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.836083 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-682tr\" (UniqueName: \"kubernetes.io/projected/ddfed60b-8b0b-4481-b9f7-f906dd6413f8-kube-api-access-682tr\") pod \"cinder-operator-controller-manager-644bddb6d8-l6rjg\" (UID: \"ddfed60b-8b0b-4481-b9f7-f906dd6413f8\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.840136 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.878223 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.879305 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.887037 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-6zkg8" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.917139 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.938768 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7fg9\" (UniqueName: \"kubernetes.io/projected/685ab06d-d56b-429c-b196-3f2576a63ad5-kube-api-access-z7fg9\") pod \"glance-operator-controller-manager-84958c4d49-r2t7s\" (UID: \"685ab06d-d56b-429c-b196-3f2576a63ad5\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.938839 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl2ws\" (UniqueName: \"kubernetes.io/projected/1c52463d-7f43-422b-b6f2-071553e4efb1-kube-api-access-cl2ws\") pod \"barbican-operator-controller-manager-6ff8b75857-td7cl\" (UID: \"1c52463d-7f43-422b-b6f2-071553e4efb1\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.938865 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lgsp\" (UniqueName: \"kubernetes.io/projected/2f85ca51-dac6-464b-8da5-b2b35511c3a7-kube-api-access-5lgsp\") pod \"designate-operator-controller-manager-84f4f7b77b-k4j9s\" (UID: \"2f85ca51-dac6-464b-8da5-b2b35511c3a7\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.938884 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-682tr\" (UniqueName: \"kubernetes.io/projected/ddfed60b-8b0b-4481-b9f7-f906dd6413f8-kube-api-access-682tr\") pod \"cinder-operator-controller-manager-644bddb6d8-l6rjg\" (UID: \"ddfed60b-8b0b-4481-b9f7-f906dd6413f8\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.941198 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.942241 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.948397 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-pmvwk" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.951700 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s"] Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.969422 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-682tr\" (UniqueName: \"kubernetes.io/projected/ddfed60b-8b0b-4481-b9f7-f906dd6413f8-kube-api-access-682tr\") pod \"cinder-operator-controller-manager-644bddb6d8-l6rjg\" (UID: \"ddfed60b-8b0b-4481-b9f7-f906dd6413f8\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.976673 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lgsp\" (UniqueName: \"kubernetes.io/projected/2f85ca51-dac6-464b-8da5-b2b35511c3a7-kube-api-access-5lgsp\") pod \"designate-operator-controller-manager-84f4f7b77b-k4j9s\" (UID: \"2f85ca51-dac6-464b-8da5-b2b35511c3a7\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:10 crc kubenswrapper[4605]: I1001 13:59:10.986221 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.000325 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl2ws\" (UniqueName: \"kubernetes.io/projected/1c52463d-7f43-422b-b6f2-071553e4efb1-kube-api-access-cl2ws\") pod \"barbican-operator-controller-manager-6ff8b75857-td7cl\" (UID: \"1c52463d-7f43-422b-b6f2-071553e4efb1\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.000413 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.001610 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.006624 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-7q7x2" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.028440 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.031484 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.032736 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.040005 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mldhl\" (UniqueName: \"kubernetes.io/projected/6728814d-8d86-4255-8e33-c2205cc3421b-kube-api-access-mldhl\") pod \"heat-operator-controller-manager-5d889d78cf-bc7cq\" (UID: \"6728814d-8d86-4255-8e33-c2205cc3421b\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.040138 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7fg9\" (UniqueName: \"kubernetes.io/projected/685ab06d-d56b-429c-b196-3f2576a63ad5-kube-api-access-z7fg9\") pod \"glance-operator-controller-manager-84958c4d49-r2t7s\" (UID: \"685ab06d-d56b-429c-b196-3f2576a63ad5\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.040251 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmzbc\" (UniqueName: \"kubernetes.io/projected/114ac89a-6b52-4e58-8ec7-1a5ebe953e46-kube-api-access-cmzbc\") pod \"horizon-operator-controller-manager-9f4696d94-4gzrq\" (UID: \"114ac89a-6b52-4e58-8ec7-1a5ebe953e46\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.046182 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.046333 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-klqzc" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.093148 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.094116 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.094840 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.095240 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.100596 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6j28d" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.100848 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-srhwj" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.101464 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.104717 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.110380 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7fg9\" (UniqueName: \"kubernetes.io/projected/685ab06d-d56b-429c-b196-3f2576a63ad5-kube-api-access-z7fg9\") pod \"glance-operator-controller-manager-84958c4d49-r2t7s\" (UID: \"685ab06d-d56b-429c-b196-3f2576a63ad5\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.113201 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.114117 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.117887 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-bn8ks" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.118368 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.132630 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.140448 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141640 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mldhl\" (UniqueName: \"kubernetes.io/projected/6728814d-8d86-4255-8e33-c2205cc3421b-kube-api-access-mldhl\") pod \"heat-operator-controller-manager-5d889d78cf-bc7cq\" (UID: \"6728814d-8d86-4255-8e33-c2205cc3421b\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141684 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg58s\" (UniqueName: \"kubernetes.io/projected/a3fbdb59-b188-4842-af73-d3c68afd58ff-kube-api-access-cg58s\") pod \"ironic-operator-controller-manager-5cd4858477-sb7tm\" (UID: \"a3fbdb59-b188-4842-af73-d3c68afd58ff\") " pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141717 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kc9g\" (UniqueName: \"kubernetes.io/projected/e578b0b7-de64-4492-9ab3-b8b73ebd0909-kube-api-access-6kc9g\") pod \"manila-operator-controller-manager-6d68dbc695-kxwrs\" (UID: \"e578b0b7-de64-4492-9ab3-b8b73ebd0909\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141746 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/769cd151-8943-4faa-876c-e91d749ef107-cert\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141762 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2wlq\" (UniqueName: \"kubernetes.io/projected/769cd151-8943-4faa-876c-e91d749ef107-kube-api-access-s2wlq\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141796 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swvkr\" (UniqueName: \"kubernetes.io/projected/416364d4-8fac-4979-b4f1-e1f009f0b8cd-kube-api-access-swvkr\") pod \"keystone-operator-controller-manager-665ff6bffd-fdn7m\" (UID: \"416364d4-8fac-4979-b4f1-e1f009f0b8cd\") " pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.141818 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmzbc\" (UniqueName: \"kubernetes.io/projected/114ac89a-6b52-4e58-8ec7-1a5ebe953e46-kube-api-access-cmzbc\") pod \"horizon-operator-controller-manager-9f4696d94-4gzrq\" (UID: \"114ac89a-6b52-4e58-8ec7-1a5ebe953e46\") " 
pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.147067 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.157916 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.180886 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmzbc\" (UniqueName: \"kubernetes.io/projected/114ac89a-6b52-4e58-8ec7-1a5ebe953e46-kube-api-access-cmzbc\") pod \"horizon-operator-controller-manager-9f4696d94-4gzrq\" (UID: \"114ac89a-6b52-4e58-8ec7-1a5ebe953e46\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.202367 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.219932 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.221272 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.223244 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4lqzr" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.229566 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mldhl\" (UniqueName: \"kubernetes.io/projected/6728814d-8d86-4255-8e33-c2205cc3421b-kube-api-access-mldhl\") pod \"heat-operator-controller-manager-5d889d78cf-bc7cq\" (UID: \"6728814d-8d86-4255-8e33-c2205cc3421b\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.242691 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg58s\" (UniqueName: \"kubernetes.io/projected/a3fbdb59-b188-4842-af73-d3c68afd58ff-kube-api-access-cg58s\") pod \"ironic-operator-controller-manager-5cd4858477-sb7tm\" (UID: \"a3fbdb59-b188-4842-af73-d3c68afd58ff\") " pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.243008 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kc9g\" (UniqueName: \"kubernetes.io/projected/e578b0b7-de64-4492-9ab3-b8b73ebd0909-kube-api-access-6kc9g\") pod \"manila-operator-controller-manager-6d68dbc695-kxwrs\" (UID: \"e578b0b7-de64-4492-9ab3-b8b73ebd0909\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.243039 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/769cd151-8943-4faa-876c-e91d749ef107-cert\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.243056 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2wlq\" (UniqueName: \"kubernetes.io/projected/769cd151-8943-4faa-876c-e91d749ef107-kube-api-access-s2wlq\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.243108 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swvkr\" (UniqueName: \"kubernetes.io/projected/416364d4-8fac-4979-b4f1-e1f009f0b8cd-kube-api-access-swvkr\") pod \"keystone-operator-controller-manager-665ff6bffd-fdn7m\" (UID: \"416364d4-8fac-4979-b4f1-e1f009f0b8cd\") " pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.243149 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfxjn\" (UniqueName: \"kubernetes.io/projected/5db7ac6b-c1e1-4640-943f-9db9a460e625-kube-api-access-xfxjn\") pod \"mariadb-operator-controller-manager-88c7-rscdq\" (UID: \"5db7ac6b-c1e1-4640-943f-9db9a460e625\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"
Oct 01 13:59:11 crc kubenswrapper[4605]: E1001 13:59:11.243588 4605 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Oct 01 13:59:11 crc kubenswrapper[4605]: E1001 13:59:11.243636 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/769cd151-8943-4faa-876c-e91d749ef107-cert podName:769cd151-8943-4faa-876c-e91d749ef107 nodeName:}" failed. No retries permitted until 2025-10-01 13:59:11.743617903 +0000 UTC m=+874.487594101 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/769cd151-8943-4faa-876c-e91d749ef107-cert") pod "infra-operator-controller-manager-9d6c5db85-p5vqs" (UID: "769cd151-8943-4faa-876c-e91d749ef107") : secret "infra-operator-webhook-server-cert" not found
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.250318 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.251263 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.253402 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-99rx8"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.262723 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.270628 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.282703 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg58s\" (UniqueName: \"kubernetes.io/projected/a3fbdb59-b188-4842-af73-d3c68afd58ff-kube-api-access-cg58s\") pod \"ironic-operator-controller-manager-5cd4858477-sb7tm\" (UID: \"a3fbdb59-b188-4842-af73-d3c68afd58ff\") " pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.292517 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kc9g\" (UniqueName: \"kubernetes.io/projected/e578b0b7-de64-4492-9ab3-b8b73ebd0909-kube-api-access-6kc9g\") pod \"manila-operator-controller-manager-6d68dbc695-kxwrs\" (UID: \"e578b0b7-de64-4492-9ab3-b8b73ebd0909\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.303728 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2wlq\" (UniqueName: \"kubernetes.io/projected/769cd151-8943-4faa-876c-e91d749ef107-kube-api-access-s2wlq\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.308804 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.310660 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swvkr\" (UniqueName: \"kubernetes.io/projected/416364d4-8fac-4979-b4f1-e1f009f0b8cd-kube-api-access-swvkr\") pod \"keystone-operator-controller-manager-665ff6bffd-fdn7m\" (UID: \"416364d4-8fac-4979-b4f1-e1f009f0b8cd\") " pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.328118 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.329317 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.331570 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-fchdc"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.337283 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.339362 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.344147 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-lrljg"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.346546 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.346822 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8srb\" (UniqueName: \"kubernetes.io/projected/6ee4d18f-3f02-49c4-943c-534e47601be5-kube-api-access-z8srb\") pod \"neutron-operator-controller-manager-849d5b9b84-b2fzm\" (UID: \"6ee4d18f-3f02-49c4-943c-534e47601be5\") " pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.346982 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfxjn\" (UniqueName: \"kubernetes.io/projected/5db7ac6b-c1e1-4640-943f-9db9a460e625-kube-api-access-xfxjn\") pod \"mariadb-operator-controller-manager-88c7-rscdq\" (UID: \"5db7ac6b-c1e1-4640-943f-9db9a460e625\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.347107 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvf2t\" (UniqueName: \"kubernetes.io/projected/21d80fce-11c1-4ca0-8687-dc2bb6ced356-kube-api-access-tvf2t\") pod \"nova-operator-controller-manager-64cd67b5cb-xrh9t\" (UID: \"21d80fce-11c1-4ca0-8687-dc2bb6ced356\") " pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.371067 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.384308 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.388683 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfxjn\" (UniqueName: \"kubernetes.io/projected/5db7ac6b-c1e1-4640-943f-9db9a460e625-kube-api-access-xfxjn\") pod \"mariadb-operator-controller-manager-88c7-rscdq\" (UID: \"5db7ac6b-c1e1-4640-943f-9db9a460e625\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.437416 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.453673 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8srb\" (UniqueName: \"kubernetes.io/projected/6ee4d18f-3f02-49c4-943c-534e47601be5-kube-api-access-z8srb\") pod \"neutron-operator-controller-manager-849d5b9b84-b2fzm\" (UID: \"6ee4d18f-3f02-49c4-943c-534e47601be5\") " pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.453723 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66ksk\" (UniqueName: \"kubernetes.io/projected/f105c6d3-5a2b-442c-ad1c-bcffd3fd869b-kube-api-access-66ksk\") pod \"octavia-operator-controller-manager-7b787867f4-cx7q7\" (UID: \"f105c6d3-5a2b-442c-ad1c-bcffd3fd869b\") " pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.453757 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvf2t\" (UniqueName: \"kubernetes.io/projected/21d80fce-11c1-4ca0-8687-dc2bb6ced356-kube-api-access-tvf2t\") pod \"nova-operator-controller-manager-64cd67b5cb-xrh9t\" (UID: \"21d80fce-11c1-4ca0-8687-dc2bb6ced356\") " pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.457768 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.471666 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-x76mp"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.472323 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.473066 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.502579 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.519692 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.531682 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.537102 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.606499 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvf2t\" (UniqueName: \"kubernetes.io/projected/21d80fce-11c1-4ca0-8687-dc2bb6ced356-kube-api-access-tvf2t\") pod \"nova-operator-controller-manager-64cd67b5cb-xrh9t\" (UID: \"21d80fce-11c1-4ca0-8687-dc2bb6ced356\") " pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.608439 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-rdnx2"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.622884 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.623157 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.623888 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66ksk\" (UniqueName: \"kubernetes.io/projected/f105c6d3-5a2b-442c-ad1c-bcffd3fd869b-kube-api-access-66ksk\") pod \"octavia-operator-controller-manager-7b787867f4-cx7q7\" (UID: \"f105c6d3-5a2b-442c-ad1c-bcffd3fd869b\") " pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.633236 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8srb\" (UniqueName: \"kubernetes.io/projected/6ee4d18f-3f02-49c4-943c-534e47601be5-kube-api-access-z8srb\") pod \"neutron-operator-controller-manager-849d5b9b84-b2fzm\" (UID: \"6ee4d18f-3f02-49c4-943c-534e47601be5\") " pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.654126 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.670156 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.689384 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln"]
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.690590 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.700495 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-6mk4b"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.726955 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.727083 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgwh2\" (UniqueName: \"kubernetes.io/projected/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-kube-api-access-pgwh2\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.727173 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5glqf\" (UniqueName: \"kubernetes.io/projected/d861e141-379a-4d47-bca7-bff86972afaa-kube-api-access-5glqf\") pod \"ovn-operator-controller-manager-9976ff44c-8sgk8\" (UID: \"d861e141-379a-4d47-bca7-bff86972afaa\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8"
Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.741591 4605
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66ksk\" (UniqueName: \"kubernetes.io/projected/f105c6d3-5a2b-442c-ad1c-bcffd3fd869b-kube-api-access-66ksk\") pod \"octavia-operator-controller-manager-7b787867f4-cx7q7\" (UID: \"f105c6d3-5a2b-442c-ad1c-bcffd3fd869b\") " pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.743856 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.770368 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.788445 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-b68zg" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.791857 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.792967 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.818477 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-584sz" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852565 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852602 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/769cd151-8943-4faa-876c-e91d749ef107-cert\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852634 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wffxl\" (UniqueName: \"kubernetes.io/projected/34738360-dd91-4a55-b6d2-ab69d1bb5db4-kube-api-access-wffxl\") pod \"swift-operator-controller-manager-84d6b4b759-xdntp\" (UID: \"34738360-dd91-4a55-b6d2-ab69d1bb5db4\") " pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852665 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf96h\" (UniqueName: \"kubernetes.io/projected/97fdc8f2-0472-4957-a59a-fd9474c0d15c-kube-api-access-gf96h\") pod \"placement-operator-controller-manager-589c58c6c-tf7ln\" (UID: \"97fdc8f2-0472-4957-a59a-fd9474c0d15c\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852683 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-pgwh2\" (UniqueName: \"kubernetes.io/projected/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-kube-api-access-pgwh2\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852714 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5glqf\" (UniqueName: \"kubernetes.io/projected/d861e141-379a-4d47-bca7-bff86972afaa-kube-api-access-5glqf\") pod \"ovn-operator-controller-manager-9976ff44c-8sgk8\" (UID: \"d861e141-379a-4d47-bca7-bff86972afaa\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.852738 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zcq8\" (UniqueName: \"kubernetes.io/projected/8dcbd1dd-75c6-40ff-a9ea-267f9be92433-kube-api-access-6zcq8\") pod \"telemetry-operator-controller-manager-b8d54b5d7-x8fvl\" (UID: \"8dcbd1dd-75c6-40ff-a9ea-267f9be92433\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:11 crc kubenswrapper[4605]: E1001 13:59:11.852863 4605 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 13:59:11 crc kubenswrapper[4605]: E1001 13:59:11.852903 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert podName:4c039eff-8d65-45d5-9c1a-9fddca3c5e57 nodeName:}" failed. No retries permitted until 2025-10-01 13:59:12.352890382 +0000 UTC m=+875.096866590 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert") pod "openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" (UID: "4c039eff-8d65-45d5-9c1a-9fddca3c5e57") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.855659 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.865140 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/769cd151-8943-4faa-876c-e91d749ef107-cert\") pod \"infra-operator-controller-manager-9d6c5db85-p5vqs\" (UID: \"769cd151-8943-4faa-876c-e91d749ef107\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.882420 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.887888 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgwh2\" (UniqueName: \"kubernetes.io/projected/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-kube-api-access-pgwh2\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.900039 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.904466 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5glqf\" (UniqueName: \"kubernetes.io/projected/d861e141-379a-4d47-bca7-bff86972afaa-kube-api-access-5glqf\") pod \"ovn-operator-controller-manager-9976ff44c-8sgk8\" (UID: \"d861e141-379a-4d47-bca7-bff86972afaa\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.920394 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.922137 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-85777745bb-8rcr7"] Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.927379 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.937300 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.943363 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-rmsr7" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.956772 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf96h\" (UniqueName: \"kubernetes.io/projected/97fdc8f2-0472-4957-a59a-fd9474c0d15c-kube-api-access-gf96h\") pod \"placement-operator-controller-manager-589c58c6c-tf7ln\" (UID: \"97fdc8f2-0472-4957-a59a-fd9474c0d15c\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.956835 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zcq8\" (UniqueName: \"kubernetes.io/projected/8dcbd1dd-75c6-40ff-a9ea-267f9be92433-kube-api-access-6zcq8\") pod \"telemetry-operator-controller-manager-b8d54b5d7-x8fvl\" (UID: \"8dcbd1dd-75c6-40ff-a9ea-267f9be92433\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.956906 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wffxl\" (UniqueName: \"kubernetes.io/projected/34738360-dd91-4a55-b6d2-ab69d1bb5db4-kube-api-access-wffxl\") pod \"swift-operator-controller-manager-84d6b4b759-xdntp\" (UID: \"34738360-dd91-4a55-b6d2-ab69d1bb5db4\") " pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.963394 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:11 crc kubenswrapper[4605]: I1001 13:59:11.976182 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.031291 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wffxl\" (UniqueName: \"kubernetes.io/projected/34738360-dd91-4a55-b6d2-ab69d1bb5db4-kube-api-access-wffxl\") pod \"swift-operator-controller-manager-84d6b4b759-xdntp\" (UID: \"34738360-dd91-4a55-b6d2-ab69d1bb5db4\") " pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.055856 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf96h\" (UniqueName: \"kubernetes.io/projected/97fdc8f2-0472-4957-a59a-fd9474c0d15c-kube-api-access-gf96h\") pod \"placement-operator-controller-manager-589c58c6c-tf7ln\" (UID: \"97fdc8f2-0472-4957-a59a-fd9474c0d15c\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.069314 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh8ww\" (UniqueName: \"kubernetes.io/projected/6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1-kube-api-access-hh8ww\") pod \"test-operator-controller-manager-85777745bb-8rcr7\" (UID: \"6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1\") " pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.070110 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zcq8\" (UniqueName: \"kubernetes.io/projected/8dcbd1dd-75c6-40ff-a9ea-267f9be92433-kube-api-access-6zcq8\") pod \"telemetry-operator-controller-manager-b8d54b5d7-x8fvl\" (UID: \"8dcbd1dd-75c6-40ff-a9ea-267f9be92433\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.090248 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.097045 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.106589 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-kj9pg" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.107988 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-85777745bb-8rcr7"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.114607 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.153406 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.171377 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh8ww\" (UniqueName: \"kubernetes.io/projected/6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1-kube-api-access-hh8ww\") pod \"test-operator-controller-manager-85777745bb-8rcr7\" (UID: \"6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1\") " pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.171495 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbp6v\" (UniqueName: \"kubernetes.io/projected/baf8d82f-41e9-417a-9e88-4320b65d7c6c-kube-api-access-gbp6v\") pod \"watcher-operator-controller-manager-6b9957f54f-ct9tb\" (UID: \"baf8d82f-41e9-417a-9e88-4320b65d7c6c\") " pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.200449 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.221161 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh8ww\" (UniqueName: \"kubernetes.io/projected/6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1-kube-api-access-hh8ww\") pod \"test-operator-controller-manager-85777745bb-8rcr7\" (UID: \"6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1\") " pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.248144 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.249508 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.253504 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.258947 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.268619 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-q9tlp" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.268651 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.274058 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbp6v\" (UniqueName: \"kubernetes.io/projected/baf8d82f-41e9-417a-9e88-4320b65d7c6c-kube-api-access-gbp6v\") pod \"watcher-operator-controller-manager-6b9957f54f-ct9tb\" (UID: \"baf8d82f-41e9-417a-9e88-4320b65d7c6c\") " pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.298683 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbp6v\" (UniqueName: \"kubernetes.io/projected/baf8d82f-41e9-417a-9e88-4320b65d7c6c-kube-api-access-gbp6v\") pod \"watcher-operator-controller-manager-6b9957f54f-ct9tb\" (UID: \"baf8d82f-41e9-417a-9e88-4320b65d7c6c\") " pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.332966 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.335843 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.338593 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.353397 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-8hgw8" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.354666 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.375394 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.375586 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45vq2\" (UniqueName: \"kubernetes.io/projected/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-kube-api-access-45vq2\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.375688 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-cert\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: E1001 13:59:12.377221 4605 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 13:59:12 crc kubenswrapper[4605]: E1001 13:59:12.377304 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert podName:4c039eff-8d65-45d5-9c1a-9fddca3c5e57 nodeName:}" failed. No retries permitted until 2025-10-01 13:59:13.377283626 +0000 UTC m=+876.121259884 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert") pod "openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" (UID: "4c039eff-8d65-45d5-9c1a-9fddca3c5e57") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.479687 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45vq2\" (UniqueName: \"kubernetes.io/projected/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-kube-api-access-45vq2\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.479734 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-cert\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.479797 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bmnp\" (UniqueName: \"kubernetes.io/projected/0de7ac6f-ac16-4f83-8e06-10c9b2500491-kube-api-access-7bmnp\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9\" (UID: \"0de7ac6f-ac16-4f83-8e06-10c9b2500491\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" Oct 01 13:59:12 crc kubenswrapper[4605]: E1001 13:59:12.480178 4605 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 01 13:59:12 crc kubenswrapper[4605]: E1001 13:59:12.480236 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-cert podName:7ba2ae30-a4df-43a7-b6bc-89814bd65ab7 nodeName:}" failed. No retries permitted until 2025-10-01 13:59:12.98021977 +0000 UTC m=+875.724195978 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-cert") pod "openstack-operator-controller-manager-6565f9cdf-zgq4z" (UID: "7ba2ae30-a4df-43a7-b6bc-89814bd65ab7") : secret "webhook-server-cert" not found Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.520616 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.549273 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45vq2\" (UniqueName: \"kubernetes.io/projected/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-kube-api-access-45vq2\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.568274 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s"] Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.571665 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.582659 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bmnp\" (UniqueName: \"kubernetes.io/projected/0de7ac6f-ac16-4f83-8e06-10c9b2500491-kube-api-access-7bmnp\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9\" (UID: \"0de7ac6f-ac16-4f83-8e06-10c9b2500491\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.609953 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg"] Oct 01 13:59:12 crc kubenswrapper[4605]: W1001 13:59:12.626662 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddfed60b_8b0b_4481_b9f7_f906dd6413f8.slice/crio-0f1b1a774e80b18ce005ea1b25c5738c24b9ba0e894f7ec1c8700eb3f5afacd3 WatchSource:0}: Error finding container 0f1b1a774e80b18ce005ea1b25c5738c24b9ba0e894f7ec1c8700eb3f5afacd3: Status 404 returned error can't find the container with id 0f1b1a774e80b18ce005ea1b25c5738c24b9ba0e894f7ec1c8700eb3f5afacd3 Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.628855 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bmnp\" (UniqueName: \"kubernetes.io/projected/0de7ac6f-ac16-4f83-8e06-10c9b2500491-kube-api-access-7bmnp\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9\" (UID: \"0de7ac6f-ac16-4f83-8e06-10c9b2500491\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" Oct 01 13:59:12 crc kubenswrapper[4605]: W1001 13:59:12.636156 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod685ab06d_d56b_429c_b196_3f2576a63ad5.slice/crio-5552564b4c087097ffee75ab2edb23e3b25bc6bc550e16c89ac267ef94bf1383 WatchSource:0}: Error finding container 5552564b4c087097ffee75ab2edb23e3b25bc6bc550e16c89ac267ef94bf1383: Status 404 returned error can't find the container with id 5552564b4c087097ffee75ab2edb23e3b25bc6bc550e16c89ac267ef94bf1383 Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.677758 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" event={"ID":"ddfed60b-8b0b-4481-b9f7-f906dd6413f8","Type":"ContainerStarted","Data":"0f1b1a774e80b18ce005ea1b25c5738c24b9ba0e894f7ec1c8700eb3f5afacd3"} Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.678817 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" event={"ID":"1c52463d-7f43-422b-b6f2-071553e4efb1","Type":"ContainerStarted","Data":"199d5725dce8424ffcc12b7b22f2b577169d6de230e1789768c7bc4a1902eed9"} Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.679563 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" event={"ID":"685ab06d-d56b-429c-b196-3f2576a63ad5","Type":"ContainerStarted","Data":"5552564b4c087097ffee75ab2edb23e3b25bc6bc550e16c89ac267ef94bf1383"} Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.712384 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.990053 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-cert\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:12 crc kubenswrapper[4605]: I1001 13:59:12.999704 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba2ae30-a4df-43a7-b6bc-89814bd65ab7-cert\") pod \"openstack-operator-controller-manager-6565f9cdf-zgq4z\" (UID: \"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7\") " pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.169908 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.199425 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.205190 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.215120 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-rscdq"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.223713 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.231776 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm"] Oct 01 13:59:13 crc kubenswrapper[4605]: W1001 13:59:13.266431 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod114ac89a_6b52_4e58_8ec7_1a5ebe953e46.slice/crio-b6062ab5b94051eb5736245a5d2a18ae316bb2381ba6533b95a2b558397cdd41 WatchSource:0}: Error finding container b6062ab5b94051eb5736245a5d2a18ae316bb2381ba6533b95a2b558397cdd41: Status 404 returned error can't find the container with id b6062ab5b94051eb5736245a5d2a18ae316bb2381ba6533b95a2b558397cdd41 Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.280615 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.323549 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.333944 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.336784 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.340795 4605 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.347408 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7"] Oct 01 13:59:13 crc kubenswrapper[4605]: W1001 13:59:13.355183 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode578b0b7_de64_4492_9ab3_b8b73ebd0909.slice/crio-9be028850e9763a49472f01a7d36ba22896d3015bb03bb39d66335e1c2089614 WatchSource:0}: Error finding container 9be028850e9763a49472f01a7d36ba22896d3015bb03bb39d66335e1c2089614: Status 404 returned error can't find the container with id 9be028850e9763a49472f01a7d36ba22896d3015bb03bb39d66335e1c2089614 Oct 01 13:59:13 crc kubenswrapper[4605]: W1001 13:59:13.366269 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd861e141_379a_4d47_bca7_bff86972afaa.slice/crio-bfc30aecf0125e5f8ee28005b312a630d121b31902d532c4206e48f8ef088168 WatchSource:0}: Error finding container bfc30aecf0125e5f8ee28005b312a630d121b31902d532c4206e48f8ef088168: Status 404 returned error can't find the container with id bfc30aecf0125e5f8ee28005b312a630d121b31902d532c4206e48f8ef088168 Oct 01 13:59:13 crc kubenswrapper[4605]: E1001 13:59:13.397623 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.184:5001/openstack-k8s-operators/keystone-operator:d60d115259f059180ec81e6831cedf9599cd9cf8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-swvkr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-665ff6bffd-fdn7m_openstack-operators(416364d4-8fac-4979-b4f1-e1f009f0b8cd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.409382 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.420245 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4c039eff-8d65-45d5-9c1a-9fddca3c5e57-cert\") pod \"openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb\" (UID: \"4c039eff-8d65-45d5-9c1a-9fddca3c5e57\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:13 crc kubenswrapper[4605]: E1001 13:59:13.589173 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" podUID="416364d4-8fac-4979-b4f1-e1f009f0b8cd" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.601670 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.616918 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.701749 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" event={"ID":"e578b0b7-de64-4492-9ab3-b8b73ebd0909","Type":"ContainerStarted","Data":"9be028850e9763a49472f01a7d36ba22896d3015bb03bb39d66335e1c2089614"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.708570 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" event={"ID":"baf8d82f-41e9-417a-9e88-4320b65d7c6c","Type":"ContainerStarted","Data":"7e9fbcd9450bdb722c050d274e40df7b7c069f7ccd2e2b53f8d3ae25a9cc9bfd"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.721050 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" event={"ID":"f105c6d3-5a2b-442c-ad1c-bcffd3fd869b","Type":"ContainerStarted","Data":"ce8e39b7f7ef6e02f9b00ac586251e30f24d42f085fb6df3bd5fc64a1f9ec868"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.722000 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" event={"ID":"6728814d-8d86-4255-8e33-c2205cc3421b","Type":"ContainerStarted","Data":"ab7ad7f87cf08fb3e70e8a18b4421b6ac81b681c1d0dd3d865d6052c1fee1974"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.724774 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" event={"ID":"d861e141-379a-4d47-bca7-bff86972afaa","Type":"ContainerStarted","Data":"bfc30aecf0125e5f8ee28005b312a630d121b31902d532c4206e48f8ef088168"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.725640 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.735635 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" event={"ID":"114ac89a-6b52-4e58-8ec7-1a5ebe953e46","Type":"ContainerStarted","Data":"b6062ab5b94051eb5736245a5d2a18ae316bb2381ba6533b95a2b558397cdd41"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.744890 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" event={"ID":"5db7ac6b-c1e1-4640-943f-9db9a460e625","Type":"ContainerStarted","Data":"81ae387ccd0adad4f31d806ac885fa61cc48ddd5727c57f648887047a8cb7fe3"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.753584 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" event={"ID":"2f85ca51-dac6-464b-8da5-b2b35511c3a7","Type":"ContainerStarted","Data":"fffc9d116dbcf6cf263b93c2c301910514b4a2415c7129afa56a849691d10a53"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.761509 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.776581 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-85777745bb-8rcr7"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.784180 4605 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" event={"ID":"a3fbdb59-b188-4842-af73-d3c68afd58ff","Type":"ContainerStarted","Data":"d3c81a38f20517b37b5db2789a50fa2bb1da0df37958a60a90a7a9881b17708c"} Oct 01 13:59:13 crc kubenswrapper[4605]: W1001 13:59:13.795372 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97fdc8f2_0472_4957_a59a_fd9474c0d15c.slice/crio-ae2ec6fd07d6bb450b9d470a9f8ac600b8c9dd62f27e65d9627bc66cad1ad419 WatchSource:0}: Error finding container ae2ec6fd07d6bb450b9d470a9f8ac600b8c9dd62f27e65d9627bc66cad1ad419: Status 404 returned error can't find the container with id ae2ec6fd07d6bb450b9d470a9f8ac600b8c9dd62f27e65d9627bc66cad1ad419 Oct 01 13:59:13 crc kubenswrapper[4605]: E1001 13:59:13.797806 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gf96h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-tf7ln_openstack-operators(97fdc8f2-0472-4957-a59a-fd9474c0d15c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.815552 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" 
event={"ID":"6ee4d18f-3f02-49c4-943c-534e47601be5","Type":"ContainerStarted","Data":"0cd02916b21a379eef2acf91ff55f1599c61d31e0e2346da9797c9df74e1861c"} Oct 01 13:59:13 crc kubenswrapper[4605]: E1001 13:59:13.817083 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hh8ww,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-85777745bb-8rcr7_openstack-operators(6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.849381 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" event={"ID":"416364d4-8fac-4979-b4f1-e1f009f0b8cd","Type":"ContainerStarted","Data":"53fa27727c5d2199d13f8b112272980cdcf5ee5b2e2f481f7ffa7cc5b6e2e8f4"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.849416 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" event={"ID":"416364d4-8fac-4979-b4f1-e1f009f0b8cd","Type":"ContainerStarted","Data":"8a4ee6cce56704ee93a0d4dc21483fa2437aba57b6b177caaee6be9e749ff23f"} Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.859901 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs"] Oct 01 13:59:13 crc 
kubenswrapper[4605]: I1001 13:59:13.865634 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t" event={"ID":"21d80fce-11c1-4ca0-8687-dc2bb6ced356","Type":"ContainerStarted","Data":"9a30ffb490521b52f4979722e6be742cd8e7efdc27baa81967a0e5298354e83d"} Oct 01 13:59:13 crc kubenswrapper[4605]: E1001 13:59:13.865995 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.184:5001/openstack-k8s-operators/keystone-operator:d60d115259f059180ec81e6831cedf9599cd9cf8\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" podUID="416364d4-8fac-4979-b4f1-e1f009f0b8cd" Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.885446 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.886289 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9"] Oct 01 13:59:13 crc kubenswrapper[4605]: I1001 13:59:13.905749 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z"] Oct 01 13:59:13 crc kubenswrapper[4605]: E1001 13:59:13.929218 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2wlq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-9d6c5db85-p5vqs_openstack-operators(769cd151-8943-4faa-876c-e91d749ef107): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 01 13:59:13 crc kubenswrapper[4605]: W1001 13:59:13.940642 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8dcbd1dd_75c6_40ff_a9ea_267f9be92433.slice/crio-8aae2ba281dfbaf1762fa7f9df1743e20e206a49240b4ae335fa883bc1aedd38 WatchSource:0}: Error finding container 8aae2ba281dfbaf1762fa7f9df1743e20e206a49240b4ae335fa883bc1aedd38: Status 404 returned error can't find the container with id 8aae2ba281dfbaf1762fa7f9df1743e20e206a49240b4ae335fa883bc1aedd38 Oct 01 13:59:14 crc kubenswrapper[4605]: E1001 13:59:14.285559 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" podUID="97fdc8f2-0472-4957-a59a-fd9474c0d15c" Oct 01 13:59:14 crc kubenswrapper[4605]: I1001 13:59:14.305046 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb"] Oct 01 13:59:14 crc kubenswrapper[4605]: W1001 13:59:14.322884 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c039eff_8d65_45d5_9c1a_9fddca3c5e57.slice/crio-28d543b1305273fbc52f9bea4e53f6bc21a1dcbda8f1e7107c1385c5bede7f42 WatchSource:0}: Error finding container 28d543b1305273fbc52f9bea4e53f6bc21a1dcbda8f1e7107c1385c5bede7f42: Status 404 returned error can't find the container with id 28d543b1305273fbc52f9bea4e53f6bc21a1dcbda8f1e7107c1385c5bede7f42 Oct 01 13:59:14 crc kubenswrapper[4605]: E1001 13:59:14.403548 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" podUID="769cd151-8943-4faa-876c-e91d749ef107" Oct 01 13:59:14 crc kubenswrapper[4605]: E1001 13:59:14.530438 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" podUID="6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1"
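The cluster of ErrImagePull failures above is not a registry problem: "pull QPS exceeded" is kubelet's own client-side throttle, a token bucket driven by the KubeletConfiguration fields registryPullQPS (default 5) and registryBurst (default 10). When a whole batch of operator pods is scheduled at once, the bucket drains and the surplus pulls fail immediately instead of queueing. A minimal sketch of that behaviour, assuming the defaults (a toy model only; the TokenBucket class below is illustrative, not kubelet's actual implementation):

    import time

    class TokenBucket:
        # Toy model of kubelet's image-pull limiter (registryPullQPS/registryBurst).
        def __init__(self, qps=5.0, burst=10):
            self.qps, self.burst = qps, float(burst)
            self.tokens, self.last = float(burst), time.monotonic()

        def try_acquire(self):
            now = time.monotonic()
            # Refill proportionally to elapsed time, capped at the burst size.
            self.tokens = min(self.burst, self.tokens + (now - self.last) * self.qps)
            self.last = now
            if self.tokens >= 1.0:
                self.tokens -= 1.0
                return True
            return False  # kubelet surfaces this as ErrImagePull: "pull QPS exceeded"

    bucket = TokenBucket()
    rejected = sum(not bucket.try_acquire() for _ in range(15))
    print(rejected, "of 15 simultaneous pulls rejected")  # 5 with the defaults

Fifteen pods asking for images in the same instant exhaust the burst of 10 and the rest are rejected on the spot, which matches the run of failures logged between 13:59:13 and 13:59:14.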
Oct 01 13:59:14 crc kubenswrapper[4605]: I1001 13:59:14.947757 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" event={"ID":"97fdc8f2-0472-4957-a59a-fd9474c0d15c","Type":"ContainerStarted","Data":"918b2f982b01cade1e3b4eea00fe707ac64e5ea8421ff84038e366670ee92a6d"} Oct 01 13:59:14 crc kubenswrapper[4605]: I1001 13:59:14.947829 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" event={"ID":"97fdc8f2-0472-4957-a59a-fd9474c0d15c","Type":"ContainerStarted","Data":"ae2ec6fd07d6bb450b9d470a9f8ac600b8c9dd62f27e65d9627bc66cad1ad419"} Oct 01 13:59:14 crc kubenswrapper[4605]: E1001 13:59:14.959145 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" podUID="97fdc8f2-0472-4957-a59a-fd9474c0d15c" Oct 01 13:59:14 crc kubenswrapper[4605]: I1001 13:59:14.966717 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" event={"ID":"0de7ac6f-ac16-4f83-8e06-10c9b2500491","Type":"ContainerStarted","Data":"bc629e1da42392f2f7262401da86d7a0c406913c77776bd8b3a31537d48f079e"} Oct 01 13:59:14 crc kubenswrapper[4605]: I1001 13:59:14.975657 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" event={"ID":"34738360-dd91-4a55-b6d2-ab69d1bb5db4","Type":"ContainerStarted","Data":"b3afaf7836c65c5df5f5da93583d1763036165fa9f3c3e981b128db8a2a9f31b"} Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.004394 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" event={"ID":"769cd151-8943-4faa-876c-e91d749ef107","Type":"ContainerStarted","Data":"e8bf50fe52c31e55ef8d5e77a0d5444a7d17f3f03c4070f05bbf7364d1a63d89"} Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.004472 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" event={"ID":"769cd151-8943-4faa-876c-e91d749ef107","Type":"ContainerStarted","Data":"592586c438a7dc4377e3a529e43c4b0a874ffd99d61c5c217dddcbdc320af7f0"} Oct 01 13:59:15 crc kubenswrapper[4605]: E1001 13:59:15.008750 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" podUID="769cd151-8943-4faa-876c-e91d749ef107" Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.033192 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" event={"ID":"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7","Type":"ContainerStarted","Data":"3784ff92d52d16a3bbe45042ac08576ac2f9f427e8fa48b1c37bf70fe396cd81"} Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.033232 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" event={"ID":"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7","Type":"ContainerStarted","Data":"de7ce2c861c381d62116d71dba3646748004173cf7e776473b3124ac720b516d"}
Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.033241 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" event={"ID":"7ba2ae30-a4df-43a7-b6bc-89814bd65ab7","Type":"ContainerStarted","Data":"d28c51ea0469d78a4636de7cc17cb324950c571ccca655359383b520da11fbd4"} Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.033549 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.064343 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" event={"ID":"6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1","Type":"ContainerStarted","Data":"3d2c2ddd81351376c7111f098bd6374bff9230a06fb199eccb4a3519804c5727"} Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.064444 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" event={"ID":"6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1","Type":"ContainerStarted","Data":"45b245b322a7f4c6f1b01fc433ebc84a922882fa5e46588107c1165fa20d5d95"} Oct 01 13:59:15 crc kubenswrapper[4605]: E1001 13:59:15.068139 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3\\\"\"" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" podUID="6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1" Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.083346 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" event={"ID":"8dcbd1dd-75c6-40ff-a9ea-267f9be92433","Type":"ContainerStarted","Data":"8aae2ba281dfbaf1762fa7f9df1743e20e206a49240b4ae335fa883bc1aedd38"} Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.099054 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" event={"ID":"4c039eff-8d65-45d5-9c1a-9fddca3c5e57","Type":"ContainerStarted","Data":"28d543b1305273fbc52f9bea4e53f6bc21a1dcbda8f1e7107c1385c5bede7f42"} Oct 01 13:59:15 crc kubenswrapper[4605]: E1001 13:59:15.102324 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.184:5001/openstack-k8s-operators/keystone-operator:d60d115259f059180ec81e6831cedf9599cd9cf8\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" podUID="416364d4-8fac-4979-b4f1-e1f009f0b8cd" Oct 01 13:59:15 crc kubenswrapper[4605]: I1001 13:59:15.384111 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" podStartSLOduration=3.384076888 podStartE2EDuration="3.384076888s" podCreationTimestamp="2025-10-01 13:59:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 13:59:15.38101406 +0000 UTC m=+878.124990268" watchObservedRunningTime="2025-10-01 13:59:15.384076888 +0000 UTC m=+878.128053086"
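The pod_startup_latency_tracker entry above records the first pod of this batch to reach running. Its two durations differ only by image-pull time: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, while podStartSLOduration additionally excludes the window from firstStartedPulling to lastFinishedPulling (here both pull timestamps are the zero time, so the two values coincide at 3.384s). Redoing the arithmetic with the ironic-operator entry logged at 13:59:30 further down (a sketch; the field semantics are inferred from the tracker's output format):

    # Seconds past 13:59:00, copied from the ironic-operator startup entry below.
    created          = 11.0            # podCreationTimestamp  13:59:11
    first_pull_start = 13.285582968    # firstStartedPulling
    last_pull_end    = 29.130210847    # lastFinishedPulling
    running          = 30.595195753    # watchObservedRunningTime

    e2e = running - created                         # podStartE2EDuration
    slo = e2e - (last_pull_end - first_pull_start)  # pull window excluded
    print(f"e2e={e2e:.9f}s slo={slo:.9f}s")         # e2e=19.595195753s slo=3.750567874s

Both printed values match what kubelet reports for that pod, so roughly 16 of its 19.6 seconds of wall-clock startup were spent pulling the operator image.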
Oct 01 13:59:16 crc kubenswrapper[4605]: E1001 13:59:16.120044 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" podUID="769cd151-8943-4faa-876c-e91d749ef107" Oct 01 13:59:16 crc kubenswrapper[4605]: E1001 13:59:16.120855 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" podUID="97fdc8f2-0472-4957-a59a-fd9474c0d15c" Oct 01 13:59:16 crc kubenswrapper[4605]: E1001 13:59:16.120937 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:f61fdfbfd12027ce6b4e7ad553ec0582f080de0cfb472de6dc04ad3078bb17e3\\\"\"" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" podUID="6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1" Oct 01 13:59:23 crc kubenswrapper[4605]: I1001 13:59:23.206949 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6565f9cdf-zgq4z" Oct 01 13:59:26 crc kubenswrapper[4605]: E1001 13:59:26.966168 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4" Oct 01 13:59:26 crc kubenswrapper[4605]: E1001 13:59:26.966668 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wffxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-84d6b4b759-xdntp_openstack-operators(34738360-dd91-4a55-b6d2-ab69d1bb5db4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 13:59:27 crc kubenswrapper[4605]: E1001 13:59:27.884772 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8" Oct 01 13:59:27 crc kubenswrapper[4605]: E1001 13:59:27.886442 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_
CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar
{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IM
AGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL
_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pgwh2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb_openstack-operators(4c039eff-8d65-45d5-9c1a-9fddca3c5e57): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 13:59:28 crc kubenswrapper[4605]: E1001 13:59:28.502592 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2" Oct 01 13:59:28 crc kubenswrapper[4605]: E1001 13:59:28.502849 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5lgsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-84f4f7b77b-k4j9s_openstack-operators(2f85ca51-dac6-464b-8da5-b2b35511c3a7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 13:59:29 crc kubenswrapper[4605]: E1001 13:59:29.021011 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:e1328760310f3bbf4548b8b1268cd711087dd91212b92bb0be287cad1f1b6fe9" Oct 01 13:59:29 crc kubenswrapper[4605]: E1001 13:59:29.021217 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e1328760310f3bbf4548b8b1268cd711087dd91212b92bb0be287cad1f1b6fe9,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-66ksk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-7b787867f4-cx7q7_openstack-operators(f105c6d3-5a2b-442c-ad1c-bcffd3fd869b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 13:59:29 crc kubenswrapper[4605]: E1001 13:59:29.382778 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" podUID="34738360-dd91-4a55-b6d2-ab69d1bb5db4" Oct 01 13:59:29 crc kubenswrapper[4605]: E1001 13:59:29.439177 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" podUID="4c039eff-8d65-45d5-9c1a-9fddca3c5e57" Oct 01 13:59:29 crc kubenswrapper[4605]: E1001 13:59:29.464851 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" 
podUID="f105c6d3-5a2b-442c-ad1c-bcffd3fd869b" Oct 01 13:59:29 crc kubenswrapper[4605]: E1001 13:59:29.733913 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" podUID="2f85ca51-dac6-464b-8da5-b2b35511c3a7" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.235399 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" event={"ID":"1c52463d-7f43-422b-b6f2-071553e4efb1","Type":"ContainerStarted","Data":"6e09674a0d8763e6f4b69fa0e9da4547c16948ab5c8991d30a2ea10f523812a7"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.244174 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" event={"ID":"685ab06d-d56b-429c-b196-3f2576a63ad5","Type":"ContainerStarted","Data":"e1ae4bdcfbe4bf8ac097916c4a99a20d889d26f0714172674b956cbf0d7b21a4"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.259904 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" event={"ID":"4c039eff-8d65-45d5-9c1a-9fddca3c5e57","Type":"ContainerStarted","Data":"84f94c7e4d91fc67d1d2b937eb70a40d4231f56db292b9e875d773221853f3c0"} Oct 01 13:59:30 crc kubenswrapper[4605]: E1001 13:59:30.269293 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" podUID="4c039eff-8d65-45d5-9c1a-9fddca3c5e57" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.289360 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" event={"ID":"0de7ac6f-ac16-4f83-8e06-10c9b2500491","Type":"ContainerStarted","Data":"77e00269d4f758340c7b0ca16e58f6d4202048e5f4d7ed3df0de3bc5abc4097c"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.341273 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" event={"ID":"34738360-dd91-4a55-b6d2-ab69d1bb5db4","Type":"ContainerStarted","Data":"ba62fce67eae358ee4ff1608bd043a079879d172e2ea25d005d930acd34a621f"} Oct 01 13:59:30 crc kubenswrapper[4605]: E1001 13:59:30.364235 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4\\\"\"" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" podUID="34738360-dd91-4a55-b6d2-ab69d1bb5db4" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.415958 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" event={"ID":"8dcbd1dd-75c6-40ff-a9ea-267f9be92433","Type":"ContainerStarted","Data":"9a997e271c0ae7e36221dc8d188543c0e6bb49542776ee9e84535cdc971f0183"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 
13:59:30.445534 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" event={"ID":"f105c6d3-5a2b-442c-ad1c-bcffd3fd869b","Type":"ContainerStarted","Data":"fabe88a919693c653907aac91f2b2cac695019f4ca1fb2b2e9a3ac3114796a57"} Oct 01 13:59:30 crc kubenswrapper[4605]: E1001 13:59:30.459017 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e1328760310f3bbf4548b8b1268cd711087dd91212b92bb0be287cad1f1b6fe9\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" podUID="f105c6d3-5a2b-442c-ad1c-bcffd3fd869b" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.476483 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9" podStartSLOduration=3.274926316 podStartE2EDuration="18.476463809s" podCreationTimestamp="2025-10-01 13:59:12 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.928764187 +0000 UTC m=+876.672740395" lastFinishedPulling="2025-10-01 13:59:29.13030167 +0000 UTC m=+891.874277888" observedRunningTime="2025-10-01 13:59:30.460401591 +0000 UTC m=+893.204377799" watchObservedRunningTime="2025-10-01 13:59:30.476463809 +0000 UTC m=+893.220440017" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.493816 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" event={"ID":"114ac89a-6b52-4e58-8ec7-1a5ebe953e46","Type":"ContainerStarted","Data":"9d5c809b0a37992154673bcc9f66cee3262015e1e7b27daa04200b4570737e21"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.524273 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" event={"ID":"6ee4d18f-3f02-49c4-943c-534e47601be5","Type":"ContainerStarted","Data":"49333250170f5a54f977826fbf841a33aebe4c5a9e2ac5e4f1aecc40c6d6892c"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.542289 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" event={"ID":"d861e141-379a-4d47-bca7-bff86972afaa","Type":"ContainerStarted","Data":"2503e438f4547b45683d579f53b0b72b4d51de116eb942f8827db5fdfabbd51f"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.560908 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" event={"ID":"6728814d-8d86-4255-8e33-c2205cc3421b","Type":"ContainerStarted","Data":"e43a3cf8afb4ac70bc0861dedae2562fc8a65b399ae32e983f3218a3b746cbf1"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.564428 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" event={"ID":"a3fbdb59-b188-4842-af73-d3c68afd58ff","Type":"ContainerStarted","Data":"f387ffb2480e1bf7b9f537570a2ab1591fdf0fa9af9ee8ac066b0fcd1dd0ce60"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.564457 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" event={"ID":"a3fbdb59-b188-4842-af73-d3c68afd58ff","Type":"ContainerStarted","Data":"6fd45f5935dba5708d67def06cd8a8d128fc86c4f2e6b13d629a7dff1390999d"} Oct 01 13:59:30 crc kubenswrapper[4605]: 
I1001 13:59:30.565191 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.585344 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" event={"ID":"baf8d82f-41e9-417a-9e88-4320b65d7c6c","Type":"ContainerStarted","Data":"f9dbe7d12b8ded777189f08162ff2405314bb6f26f56c18fe7dfc3394c61250e"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.585374 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" event={"ID":"baf8d82f-41e9-417a-9e88-4320b65d7c6c","Type":"ContainerStarted","Data":"45a0c32f52995b76b0cd291ac145620a737cba01d930ed1b359bbfcdea3e5871"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.585880 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.595213 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" podStartSLOduration=3.750567874 podStartE2EDuration="19.595195753s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.285582968 +0000 UTC m=+876.029559176" lastFinishedPulling="2025-10-01 13:59:29.130210847 +0000 UTC m=+891.874187055" observedRunningTime="2025-10-01 13:59:30.594055914 +0000 UTC m=+893.338032112" watchObservedRunningTime="2025-10-01 13:59:30.595195753 +0000 UTC m=+893.339171961" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.609775 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" event={"ID":"5db7ac6b-c1e1-4640-943f-9db9a460e625","Type":"ContainerStarted","Data":"d913f908eb4104c04ef8ac867937f2cb63524e08092d3332aaf1a37a4fa17bc1"} Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.647006 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" event={"ID":"2f85ca51-dac6-464b-8da5-b2b35511c3a7","Type":"ContainerStarted","Data":"d029b96995c7450cff5f09830fbaf8d6d343306c58170eab526f025815a04153"} Oct 01 13:59:30 crc kubenswrapper[4605]: E1001 13:59:30.651838 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" podUID="2f85ca51-dac6-464b-8da5-b2b35511c3a7" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.659521 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" podStartSLOduration=4.180506319 podStartE2EDuration="19.659503316s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.655363786 +0000 UTC m=+876.399339994" lastFinishedPulling="2025-10-01 13:59:29.134360773 +0000 UTC m=+891.878336991" observedRunningTime="2025-10-01 13:59:30.656405887 +0000 UTC m=+893.400382085" watchObservedRunningTime="2025-10-01 13:59:30.659503316 +0000 UTC 
m=+893.403479524" Oct 01 13:59:30 crc kubenswrapper[4605]: I1001 13:59:30.672724 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" event={"ID":"ddfed60b-8b0b-4481-b9f7-f906dd6413f8","Type":"ContainerStarted","Data":"6292608729b3cd2a746535a825748374ed5de56b4113592ba5d5491c47346267"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.683384 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" event={"ID":"e578b0b7-de64-4492-9ab3-b8b73ebd0909","Type":"ContainerStarted","Data":"c03dc2c5ef0b1106e8762e1c92f11bdad458891f0d25ddfc0a3ea3e949c7e057"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.683426 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" event={"ID":"e578b0b7-de64-4492-9ab3-b8b73ebd0909","Type":"ContainerStarted","Data":"1069bdaa61ecec5ff2d57e0523b6f0c38f592f35949bc5995b8e4d2dacadc7ac"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.684385 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.686612 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" event={"ID":"ddfed60b-8b0b-4481-b9f7-f906dd6413f8","Type":"ContainerStarted","Data":"f06c2309950ec0dc3cf60107849d0271c8b261d41ce5f4306b9a992addfc3e34"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.687239 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.697416 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t" event={"ID":"21d80fce-11c1-4ca0-8687-dc2bb6ced356","Type":"ContainerStarted","Data":"dd7ae321017cdc4a6389fa7be28fc067e191c211c9f22d8f2c6ccb3814aac108"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.697469 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t" event={"ID":"21d80fce-11c1-4ca0-8687-dc2bb6ced356","Type":"ContainerStarted","Data":"cc4c2ddee8c8e6f61a46530dae28c876c91011ee6b8cacd1de552018dcf470f3"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.697583 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.707556 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" event={"ID":"114ac89a-6b52-4e58-8ec7-1a5ebe953e46","Type":"ContainerStarted","Data":"809947f76be28ffa31650ca97ddd8e336ba5c5b63a6740cd29cf496632503d05"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.707843 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.715367 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" 
event={"ID":"6728814d-8d86-4255-8e33-c2205cc3421b","Type":"ContainerStarted","Data":"3d984b82ee436dfbc932cce7178bedc79d12a5d84aa081bfb32680b675f346fc"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.716394 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.737383 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" event={"ID":"8dcbd1dd-75c6-40ff-a9ea-267f9be92433","Type":"ContainerStarted","Data":"a303d6f46e8ff2f5004c5c3c3f8b1e877381353f4f385e0ad61125c5b7a6b09c"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.738046 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.744034 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" podStartSLOduration=5.016463694 podStartE2EDuration="20.744019321s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.357673168 +0000 UTC m=+876.101649376" lastFinishedPulling="2025-10-01 13:59:29.085228785 +0000 UTC m=+891.829205003" observedRunningTime="2025-10-01 13:59:31.720303019 +0000 UTC m=+894.464279227" watchObservedRunningTime="2025-10-01 13:59:31.744019321 +0000 UTC m=+894.487995519" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.744599 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" podStartSLOduration=5.898726294 podStartE2EDuration="21.744595095s" podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.284316826 +0000 UTC m=+876.028293034" lastFinishedPulling="2025-10-01 13:59:29.130185627 +0000 UTC m=+891.874161835" observedRunningTime="2025-10-01 13:59:31.741562718 +0000 UTC m=+894.485538926" watchObservedRunningTime="2025-10-01 13:59:31.744595095 +0000 UTC m=+894.488571303" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.765441 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" event={"ID":"685ab06d-d56b-429c-b196-3f2576a63ad5","Type":"ContainerStarted","Data":"bbd71774a034a62a1a4f616b2ac8115e2e99164602adfdaffea5ce62bb14ff62"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.766061 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.771624 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" event={"ID":"5db7ac6b-c1e1-4640-943f-9db9a460e625","Type":"ContainerStarted","Data":"50c19e86d32beddca4148696040c9859c80879d92768a53dae1408e08f3938f7"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.772364 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.781976 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t" 
podStartSLOduration=5.084871631 podStartE2EDuration="20.781963344s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.384706485 +0000 UTC m=+876.128682693" lastFinishedPulling="2025-10-01 13:59:29.081798198 +0000 UTC m=+891.825774406" observedRunningTime="2025-10-01 13:59:31.780954519 +0000 UTC m=+894.524930727" watchObservedRunningTime="2025-10-01 13:59:31.781963344 +0000 UTC m=+894.525939552" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.782040 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" event={"ID":"d861e141-379a-4d47-bca7-bff86972afaa","Type":"ContainerStarted","Data":"95ab838a8414cd35bdca7e00812d8c1b9edbe0820de383509529e0a78bf55b26"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.782146 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.812386 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" event={"ID":"1c52463d-7f43-422b-b6f2-071553e4efb1","Type":"ContainerStarted","Data":"8cab575375f36afaec2e56edc90ddf962048416d310591adf4eefaf4db9d2bcb"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.812524 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.828140 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" event={"ID":"6ee4d18f-3f02-49c4-943c-534e47601be5","Type":"ContainerStarted","Data":"607df4f4de55259399d725cefcb3156baf44b206a25870d31cb7d4912f50c1d3"} Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.829925 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" Oct 01 13:59:31 crc kubenswrapper[4605]: E1001 13:59:31.835350 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e1328760310f3bbf4548b8b1268cd711087dd91212b92bb0be287cad1f1b6fe9\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" podUID="f105c6d3-5a2b-442c-ad1c-bcffd3fd869b" Oct 01 13:59:31 crc kubenswrapper[4605]: E1001 13:59:31.835708 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e7cfed051c1cf801e651fd4035070e38698039f284ac0b2a0332769fdbb4a9c8\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" podUID="4c039eff-8d65-45d5-9c1a-9fddca3c5e57"
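Once a pull has failed, the repeated "Back-off pulling image" entries in this block show kubelet's image back-off at work: each retry for the same image roughly doubles the wait, from a 10s base up to a 300s ceiling (the usual defaults; the exact values are version- and configuration-dependent, so treat this as an assumption). A sketch of the resulting ladder:

    # Back-off ladder assuming a 10s base, factor-2 growth, and a 300s cap.
    delay, ladder = 10.0, []
    for _ in range(7):
        ladder.append(delay)
        delay = min(delay * 2.0, 300.0)
    print(ladder)  # [10.0, 20.0, 40.0, 80.0, 160.0, 300.0, 300.0]

That ladder is why the same pods (octavia, openstack-baremetal, designate, swift) keep reappearing with ImagePullBackOff at widening intervals.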
podUID="2f85ca51-dac6-464b-8da5-b2b35511c3a7" Oct 01 13:59:31 crc kubenswrapper[4605]: E1001 13:59:31.835790 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:bca053da8adc37a9a246b478949960ac7abef8fcc0c58a2a45045c59a62b5fe4\\\"\"" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" podUID="34738360-dd91-4a55-b6d2-ab69d1bb5db4" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.836132 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" podStartSLOduration=5.342438181 podStartE2EDuration="21.836116019s" podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:12.635618666 +0000 UTC m=+875.379594874" lastFinishedPulling="2025-10-01 13:59:29.129296504 +0000 UTC m=+891.873272712" observedRunningTime="2025-10-01 13:59:31.835965145 +0000 UTC m=+894.579941353" watchObservedRunningTime="2025-10-01 13:59:31.836116019 +0000 UTC m=+894.580092227" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.840984 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" podStartSLOduration=6.039989661 podStartE2EDuration="21.840969252s" podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.284807288 +0000 UTC m=+876.028783496" lastFinishedPulling="2025-10-01 13:59:29.085786879 +0000 UTC m=+891.829763087" observedRunningTime="2025-10-01 13:59:31.812227563 +0000 UTC m=+894.556203771" watchObservedRunningTime="2025-10-01 13:59:31.840969252 +0000 UTC m=+894.584945460" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.862674 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" podStartSLOduration=5.170759411 podStartE2EDuration="20.862658873s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.391451536 +0000 UTC m=+876.135427744" lastFinishedPulling="2025-10-01 13:59:29.083350998 +0000 UTC m=+891.827327206" observedRunningTime="2025-10-01 13:59:31.856268541 +0000 UTC m=+894.600244749" watchObservedRunningTime="2025-10-01 13:59:31.862658873 +0000 UTC m=+894.606635081" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.924816 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" podStartSLOduration=5.965233583 podStartE2EDuration="20.924798441s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:14.125709938 +0000 UTC m=+876.869686146" lastFinishedPulling="2025-10-01 13:59:29.085274796 +0000 UTC m=+891.829251004" observedRunningTime="2025-10-01 13:59:31.89642304 +0000 UTC m=+894.640399248" watchObservedRunningTime="2025-10-01 13:59:31.924798441 +0000 UTC m=+894.668774649" Oct 01 13:59:31 crc kubenswrapper[4605]: I1001 13:59:31.949355 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" podStartSLOduration=5.258957561 podStartE2EDuration="20.949333064s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.391427995 +0000 UTC m=+876.135404203" 
lastFinishedPulling="2025-10-01 13:59:29.081803498 +0000 UTC m=+891.825779706" observedRunningTime="2025-10-01 13:59:31.942064719 +0000 UTC m=+894.686040927" watchObservedRunningTime="2025-10-01 13:59:31.949333064 +0000 UTC m=+894.693309272" Oct 01 13:59:32 crc kubenswrapper[4605]: I1001 13:59:32.005261 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" podStartSLOduration=5.576706468 podStartE2EDuration="22.005245563s" podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:12.656963887 +0000 UTC m=+875.400940095" lastFinishedPulling="2025-10-01 13:59:29.085502982 +0000 UTC m=+891.829479190" observedRunningTime="2025-10-01 13:59:31.963913664 +0000 UTC m=+894.707889872" watchObservedRunningTime="2025-10-01 13:59:32.005245563 +0000 UTC m=+894.749221771" Oct 01 13:59:32 crc kubenswrapper[4605]: I1001 13:59:32.005644 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" podStartSLOduration=5.248928436 podStartE2EDuration="22.005640753s" podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:12.362334087 +0000 UTC m=+875.106310295" lastFinishedPulling="2025-10-01 13:59:29.119046404 +0000 UTC m=+891.863022612" observedRunningTime="2025-10-01 13:59:31.997352763 +0000 UTC m=+894.741328981" watchObservedRunningTime="2025-10-01 13:59:32.005640753 +0000 UTC m=+894.749616971" Oct 01 13:59:32 crc kubenswrapper[4605]: I1001 13:59:32.093661 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" podStartSLOduration=5.35739508 podStartE2EDuration="21.093643878s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.34947215 +0000 UTC m=+876.093448358" lastFinishedPulling="2025-10-01 13:59:29.085720948 +0000 UTC m=+891.829697156" observedRunningTime="2025-10-01 13:59:32.093387861 +0000 UTC m=+894.837364069" watchObservedRunningTime="2025-10-01 13:59:32.093643878 +0000 UTC m=+894.837620086" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.121860 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-td7cl" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.151780 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-l6rjg" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.209442 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-r2t7s" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.267878 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-bc7cq" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.349658 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-4gzrq" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.538805 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5cd4858477-sb7tm" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 
13:59:41.630378 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-rscdq" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.634304 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-kxwrs" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.657344 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-64cd67b5cb-xrh9t" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.920254 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" event={"ID":"416364d4-8fac-4979-b4f1-e1f009f0b8cd","Type":"ContainerStarted","Data":"db35d140787e26398073c4ee446e852582d242a22dbaacab7fa026116ce164ed"} Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.920761 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.921980 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" event={"ID":"6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1","Type":"ContainerStarted","Data":"333c3dc083f2d61e4df0caf83aaf6d663190b7363be6c92f06311a1c0125715a"} Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.922272 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.923581 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" event={"ID":"97fdc8f2-0472-4957-a59a-fd9474c0d15c","Type":"ContainerStarted","Data":"331b80dc2df32b4ebe8e3db2d047fce6d9bb6d21068fa966968484c67e8811ed"} Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.923758 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.925452 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" event={"ID":"769cd151-8943-4faa-876c-e91d749ef107","Type":"ContainerStarted","Data":"6f25f26bb4ad45ff187967d9303171fb6456e29a3d222c047df2fa02717f8046"} Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.934614 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.934696 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-849d5b9b84-b2fzm" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.934744 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-8sgk8" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.946765 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" podStartSLOduration=3.57558215 podStartE2EDuration="30.946750245s" 
podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.397447768 +0000 UTC m=+876.141423976" lastFinishedPulling="2025-10-01 13:59:40.768615863 +0000 UTC m=+903.512592071" observedRunningTime="2025-10-01 13:59:41.943696218 +0000 UTC m=+904.687672456" watchObservedRunningTime="2025-10-01 13:59:41.946750245 +0000 UTC m=+904.690726453" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.983772 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" podStartSLOduration=3.975914045 podStartE2EDuration="30.983755725s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.79770264 +0000 UTC m=+876.541678848" lastFinishedPulling="2025-10-01 13:59:40.80554432 +0000 UTC m=+903.549520528" observedRunningTime="2025-10-01 13:59:41.978310457 +0000 UTC m=+904.722286665" watchObservedRunningTime="2025-10-01 13:59:41.983755725 +0000 UTC m=+904.727731933" Oct 01 13:59:41 crc kubenswrapper[4605]: I1001 13:59:41.999487 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" podStartSLOduration=4.036069652 podStartE2EDuration="30.999472934s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.816661421 +0000 UTC m=+876.560637629" lastFinishedPulling="2025-10-01 13:59:40.780064703 +0000 UTC m=+903.524040911" observedRunningTime="2025-10-01 13:59:41.997578636 +0000 UTC m=+904.741554844" watchObservedRunningTime="2025-10-01 13:59:41.999472934 +0000 UTC m=+904.743449142" Oct 01 13:59:42 crc kubenswrapper[4605]: I1001 13:59:42.035986 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" podStartSLOduration=5.193559901 podStartE2EDuration="32.035970251s" podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.929063605 +0000 UTC m=+876.673039813" lastFinishedPulling="2025-10-01 13:59:40.771473955 +0000 UTC m=+903.515450163" observedRunningTime="2025-10-01 13:59:42.035380486 +0000 UTC m=+904.779356704" watchObservedRunningTime="2025-10-01 13:59:42.035970251 +0000 UTC m=+904.779946459" Oct 01 13:59:42 crc kubenswrapper[4605]: I1001 13:59:42.203485 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-x8fvl" Oct 01 13:59:42 crc kubenswrapper[4605]: I1001 13:59:42.576988 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6b9957f54f-ct9tb" Oct 01 13:59:45 crc kubenswrapper[4605]: I1001 13:59:45.951492 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" event={"ID":"34738360-dd91-4a55-b6d2-ab69d1bb5db4","Type":"ContainerStarted","Data":"d07681b42705aa687abc1de8b7ef9ed1872702265d57747cd39dad33a02ce108"} Oct 01 13:59:45 crc kubenswrapper[4605]: I1001 13:59:45.952000 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:45 crc kubenswrapper[4605]: I1001 13:59:45.953213 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" 
event={"ID":"4c039eff-8d65-45d5-9c1a-9fddca3c5e57","Type":"ContainerStarted","Data":"bfda0ec9cdd1b7ce330b8b51f8af28a8b54bc1b521507de32f00ee34a55a0bfd"} Oct 01 13:59:45 crc kubenswrapper[4605]: I1001 13:59:45.953389 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 13:59:45 crc kubenswrapper[4605]: I1001 13:59:45.972167 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" podStartSLOduration=3.170458633 podStartE2EDuration="34.972142488s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.774312156 +0000 UTC m=+876.518288364" lastFinishedPulling="2025-10-01 13:59:45.575996011 +0000 UTC m=+908.319972219" observedRunningTime="2025-10-01 13:59:45.969232174 +0000 UTC m=+908.713208382" watchObservedRunningTime="2025-10-01 13:59:45.972142488 +0000 UTC m=+908.716118706" Oct 01 13:59:46 crc kubenswrapper[4605]: I1001 13:59:46.962674 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" event={"ID":"f105c6d3-5a2b-442c-ad1c-bcffd3fd869b","Type":"ContainerStarted","Data":"1f9c666d39ee26e5824052ee60b7f02a3ea05b3d4019e7ba5f2493ffcc60bd44"} Oct 01 13:59:46 crc kubenswrapper[4605]: I1001 13:59:46.963257 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" Oct 01 13:59:46 crc kubenswrapper[4605]: I1001 13:59:46.964194 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" event={"ID":"2f85ca51-dac6-464b-8da5-b2b35511c3a7","Type":"ContainerStarted","Data":"a5f37fb5b28f1891e987df41d20b2c709b44406cca8942d3aea7a98e15f235ba"} Oct 01 13:59:46 crc kubenswrapper[4605]: I1001 13:59:46.983286 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" podStartSLOduration=4.846374434 podStartE2EDuration="35.98327123s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:14.326307881 +0000 UTC m=+877.070284089" lastFinishedPulling="2025-10-01 13:59:45.463204687 +0000 UTC m=+908.207180885" observedRunningTime="2025-10-01 13:59:46.001543394 +0000 UTC m=+908.745519622" watchObservedRunningTime="2025-10-01 13:59:46.98327123 +0000 UTC m=+909.727247438" Oct 01 13:59:46 crc kubenswrapper[4605]: I1001 13:59:46.984989 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" podStartSLOduration=2.972951711 podStartE2EDuration="35.984982554s" podCreationTimestamp="2025-10-01 13:59:11 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.384953121 +0000 UTC m=+876.128929329" lastFinishedPulling="2025-10-01 13:59:46.396983974 +0000 UTC m=+909.140960172" observedRunningTime="2025-10-01 13:59:46.981853934 +0000 UTC m=+909.725830142" watchObservedRunningTime="2025-10-01 13:59:46.984982554 +0000 UTC m=+909.728958762" Oct 01 13:59:47 crc kubenswrapper[4605]: I1001 13:59:47.006911 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" podStartSLOduration=3.895462953 podStartE2EDuration="37.00689483s" 
podCreationTimestamp="2025-10-01 13:59:10 +0000 UTC" firstStartedPulling="2025-10-01 13:59:13.284601613 +0000 UTC m=+876.028577821" lastFinishedPulling="2025-10-01 13:59:46.39603349 +0000 UTC m=+909.140009698" observedRunningTime="2025-10-01 13:59:47.003882554 +0000 UTC m=+909.747858762" watchObservedRunningTime="2025-10-01 13:59:47.00689483 +0000 UTC m=+909.750871038" Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.133758 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.136179 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-k4j9s" Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.534594 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-665ff6bffd-fdn7m" Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.631648 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.631704 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.969474 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-p5vqs" Oct 01 13:59:51 crc kubenswrapper[4605]: I1001 13:59:51.979568 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7b787867f4-cx7q7" Oct 01 13:59:52 crc kubenswrapper[4605]: I1001 13:59:52.156464 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-84d6b4b759-xdntp" Oct 01 13:59:52 crc kubenswrapper[4605]: I1001 13:59:52.337418 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-tf7ln" Oct 01 13:59:52 crc kubenswrapper[4605]: I1001 13:59:52.524225 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-85777745bb-8rcr7" Oct 01 13:59:53 crc kubenswrapper[4605]: I1001 13:59:53.622630 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.155836 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q"] Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.157881 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.163142 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.174477 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q"] Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.189118 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.214732 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51dbd5d6-7c1d-4116-86d6-49d5108524cd-secret-volume\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.214825 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51dbd5d6-7c1d-4116-86d6-49d5108524cd-config-volume\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.214935 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxmwj\" (UniqueName: \"kubernetes.io/projected/51dbd5d6-7c1d-4116-86d6-49d5108524cd-kube-api-access-sxmwj\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.315552 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51dbd5d6-7c1d-4116-86d6-49d5108524cd-config-volume\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.315643 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxmwj\" (UniqueName: \"kubernetes.io/projected/51dbd5d6-7c1d-4116-86d6-49d5108524cd-kube-api-access-sxmwj\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.315688 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51dbd5d6-7c1d-4116-86d6-49d5108524cd-secret-volume\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.317539 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51dbd5d6-7c1d-4116-86d6-49d5108524cd-config-volume\") pod 
\"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.333329 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxmwj\" (UniqueName: \"kubernetes.io/projected/51dbd5d6-7c1d-4116-86d6-49d5108524cd-kube-api-access-sxmwj\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.333350 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51dbd5d6-7c1d-4116-86d6-49d5108524cd-secret-volume\") pod \"collect-profiles-29322120-gl44q\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.494275 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:00 crc kubenswrapper[4605]: I1001 14:00:00.898166 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q"] Oct 01 14:00:00 crc kubenswrapper[4605]: W1001 14:00:00.903812 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51dbd5d6_7c1d_4116_86d6_49d5108524cd.slice/crio-098386d4c8424224fdff9e6cf114faa46bb59829637e62a7348c76936455d42b WatchSource:0}: Error finding container 098386d4c8424224fdff9e6cf114faa46bb59829637e62a7348c76936455d42b: Status 404 returned error can't find the container with id 098386d4c8424224fdff9e6cf114faa46bb59829637e62a7348c76936455d42b Oct 01 14:00:01 crc kubenswrapper[4605]: I1001 14:00:01.068251 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" event={"ID":"51dbd5d6-7c1d-4116-86d6-49d5108524cd","Type":"ContainerStarted","Data":"b17679075d0a75aff5d9b6f3542463f5f5c63fd64e7c6b6bd7fe90e5a670c1fb"} Oct 01 14:00:01 crc kubenswrapper[4605]: I1001 14:00:01.068298 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" event={"ID":"51dbd5d6-7c1d-4116-86d6-49d5108524cd","Type":"ContainerStarted","Data":"098386d4c8424224fdff9e6cf114faa46bb59829637e62a7348c76936455d42b"} Oct 01 14:00:01 crc kubenswrapper[4605]: I1001 14:00:01.083556 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" podStartSLOduration=1.083541831 podStartE2EDuration="1.083541831s" podCreationTimestamp="2025-10-01 14:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:00:01.082656109 +0000 UTC m=+923.826632327" watchObservedRunningTime="2025-10-01 14:00:01.083541831 +0000 UTC m=+923.827518039" Oct 01 14:00:02 crc kubenswrapper[4605]: I1001 14:00:02.076129 4605 generic.go:334] "Generic (PLEG): container finished" podID="51dbd5d6-7c1d-4116-86d6-49d5108524cd" containerID="b17679075d0a75aff5d9b6f3542463f5f5c63fd64e7c6b6bd7fe90e5a670c1fb" exitCode=0 Oct 01 14:00:02 crc kubenswrapper[4605]: I1001 14:00:02.076238 
4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" event={"ID":"51dbd5d6-7c1d-4116-86d6-49d5108524cd","Type":"ContainerDied","Data":"b17679075d0a75aff5d9b6f3542463f5f5c63fd64e7c6b6bd7fe90e5a670c1fb"} Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.345318 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.355125 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51dbd5d6-7c1d-4116-86d6-49d5108524cd-config-volume\") pod \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.355212 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxmwj\" (UniqueName: \"kubernetes.io/projected/51dbd5d6-7c1d-4116-86d6-49d5108524cd-kube-api-access-sxmwj\") pod \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.355397 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51dbd5d6-7c1d-4116-86d6-49d5108524cd-secret-volume\") pod \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\" (UID: \"51dbd5d6-7c1d-4116-86d6-49d5108524cd\") " Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.356649 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51dbd5d6-7c1d-4116-86d6-49d5108524cd-config-volume" (OuterVolumeSpecName: "config-volume") pod "51dbd5d6-7c1d-4116-86d6-49d5108524cd" (UID: "51dbd5d6-7c1d-4116-86d6-49d5108524cd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.365244 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51dbd5d6-7c1d-4116-86d6-49d5108524cd-kube-api-access-sxmwj" (OuterVolumeSpecName: "kube-api-access-sxmwj") pod "51dbd5d6-7c1d-4116-86d6-49d5108524cd" (UID: "51dbd5d6-7c1d-4116-86d6-49d5108524cd"). InnerVolumeSpecName "kube-api-access-sxmwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.379890 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51dbd5d6-7c1d-4116-86d6-49d5108524cd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "51dbd5d6-7c1d-4116-86d6-49d5108524cd" (UID: "51dbd5d6-7c1d-4116-86d6-49d5108524cd"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.456628 4605 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51dbd5d6-7c1d-4116-86d6-49d5108524cd-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.456709 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxmwj\" (UniqueName: \"kubernetes.io/projected/51dbd5d6-7c1d-4116-86d6-49d5108524cd-kube-api-access-sxmwj\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:03 crc kubenswrapper[4605]: I1001 14:00:03.456727 4605 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51dbd5d6-7c1d-4116-86d6-49d5108524cd-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:04 crc kubenswrapper[4605]: I1001 14:00:04.096979 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" event={"ID":"51dbd5d6-7c1d-4116-86d6-49d5108524cd","Type":"ContainerDied","Data":"098386d4c8424224fdff9e6cf114faa46bb59829637e62a7348c76936455d42b"} Oct 01 14:00:04 crc kubenswrapper[4605]: I1001 14:00:04.097059 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098386d4c8424224fdff9e6cf114faa46bb59829637e62a7348c76936455d42b" Oct 01 14:00:04 crc kubenswrapper[4605]: I1001 14:00:04.097197 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.689795 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4trn"] Oct 01 14:00:11 crc kubenswrapper[4605]: E1001 14:00:11.690621 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51dbd5d6-7c1d-4116-86d6-49d5108524cd" containerName="collect-profiles" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.690635 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="51dbd5d6-7c1d-4116-86d6-49d5108524cd" containerName="collect-profiles" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.690780 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="51dbd5d6-7c1d-4116-86d6-49d5108524cd" containerName="collect-profiles" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.691483 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.694227 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.695452 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.695816 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-9cwkt" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.696054 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.744988 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4trn"] Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.764962 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f3fa37c-0d11-441b-b5f7-aa027b731553-config\") pod \"dnsmasq-dns-675f4bcbfc-n4trn\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.765064 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb8bg\" (UniqueName: \"kubernetes.io/projected/3f3fa37c-0d11-441b-b5f7-aa027b731553-kube-api-access-kb8bg\") pod \"dnsmasq-dns-675f4bcbfc-n4trn\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.781212 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h8z8s"] Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.782610 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.784487 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.804700 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h8z8s"] Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.869821 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb8bg\" (UniqueName: \"kubernetes.io/projected/3f3fa37c-0d11-441b-b5f7-aa027b731553-kube-api-access-kb8bg\") pod \"dnsmasq-dns-675f4bcbfc-n4trn\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.870500 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f3fa37c-0d11-441b-b5f7-aa027b731553-config\") pod \"dnsmasq-dns-675f4bcbfc-n4trn\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.871399 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f3fa37c-0d11-441b-b5f7-aa027b731553-config\") pod \"dnsmasq-dns-675f4bcbfc-n4trn\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.871571 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-config\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.871667 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.871752 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcwn9\" (UniqueName: \"kubernetes.io/projected/948aa060-96bc-43b1-8321-40354d477c98-kube-api-access-jcwn9\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.890211 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb8bg\" (UniqueName: \"kubernetes.io/projected/3f3fa37c-0d11-441b-b5f7-aa027b731553-kube-api-access-kb8bg\") pod \"dnsmasq-dns-675f4bcbfc-n4trn\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") " pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.972988 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-config\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 
14:00:11.973319 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.973470 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcwn9\" (UniqueName: \"kubernetes.io/projected/948aa060-96bc-43b1-8321-40354d477c98-kube-api-access-jcwn9\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.974122 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-config\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.974659 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:11 crc kubenswrapper[4605]: I1001 14:00:11.990799 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcwn9\" (UniqueName: \"kubernetes.io/projected/948aa060-96bc-43b1-8321-40354d477c98-kube-api-access-jcwn9\") pod \"dnsmasq-dns-78dd6ddcc-h8z8s\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") " pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:12 crc kubenswrapper[4605]: I1001 14:00:12.011121 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:12 crc kubenswrapper[4605]: I1001 14:00:12.106635 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:12 crc kubenswrapper[4605]: I1001 14:00:12.524925 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4trn"] Oct 01 14:00:12 crc kubenswrapper[4605]: W1001 14:00:12.532385 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f3fa37c_0d11_441b_b5f7_aa027b731553.slice/crio-86a9d3d98f06bd1dd13ab23f39238d964051b731f873eed6c5e53ea8943ee735 WatchSource:0}: Error finding container 86a9d3d98f06bd1dd13ab23f39238d964051b731f873eed6c5e53ea8943ee735: Status 404 returned error can't find the container with id 86a9d3d98f06bd1dd13ab23f39238d964051b731f873eed6c5e53ea8943ee735 Oct 01 14:00:12 crc kubenswrapper[4605]: I1001 14:00:12.587911 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h8z8s"] Oct 01 14:00:12 crc kubenswrapper[4605]: W1001 14:00:12.590078 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod948aa060_96bc_43b1_8321_40354d477c98.slice/crio-9e1952131784d1f83ea5c2cc836899cc3144905541ed85d28a22cabedbb2190a WatchSource:0}: Error finding container 9e1952131784d1f83ea5c2cc836899cc3144905541ed85d28a22cabedbb2190a: Status 404 returned error can't find the container with id 9e1952131784d1f83ea5c2cc836899cc3144905541ed85d28a22cabedbb2190a Oct 01 14:00:13 crc kubenswrapper[4605]: I1001 14:00:13.184042 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" event={"ID":"948aa060-96bc-43b1-8321-40354d477c98","Type":"ContainerStarted","Data":"9e1952131784d1f83ea5c2cc836899cc3144905541ed85d28a22cabedbb2190a"} Oct 01 14:00:13 crc kubenswrapper[4605]: I1001 14:00:13.186197 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" event={"ID":"3f3fa37c-0d11-441b-b5f7-aa027b731553","Type":"ContainerStarted","Data":"86a9d3d98f06bd1dd13ab23f39238d964051b731f873eed6c5e53ea8943ee735"} Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.681645 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4trn"] Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.709346 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6c8xk"] Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.712318 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.744652 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6c8xk"] Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.762826 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-config\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.762886 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2wgh\" (UniqueName: \"kubernetes.io/projected/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-kube-api-access-v2wgh\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.762912 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-dns-svc\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.863939 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-dns-svc\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.864054 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-config\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.864071 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2wgh\" (UniqueName: \"kubernetes.io/projected/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-kube-api-access-v2wgh\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.865248 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-config\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.865251 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-dns-svc\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:14 crc kubenswrapper[4605]: I1001 14:00:14.897309 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2wgh\" (UniqueName: 
\"kubernetes.io/projected/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-kube-api-access-v2wgh\") pod \"dnsmasq-dns-666b6646f7-6c8xk\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") " pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.040233 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.040652 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h8z8s"] Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.062667 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l6gfn"] Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.063828 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.095323 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l6gfn"] Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.168970 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.169038 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-config\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.169063 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kctmk\" (UniqueName: \"kubernetes.io/projected/72ac5f32-fb05-4196-ae15-a49ac651b84c-kube-api-access-kctmk\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.271761 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.272121 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-config\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.272154 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kctmk\" (UniqueName: \"kubernetes.io/projected/72ac5f32-fb05-4196-ae15-a49ac651b84c-kube-api-access-kctmk\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.273388 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.274473 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-config\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.290607 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kctmk\" (UniqueName: \"kubernetes.io/projected/72ac5f32-fb05-4196-ae15-a49ac651b84c-kube-api-access-kctmk\") pod \"dnsmasq-dns-57d769cc4f-l6gfn\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.446552 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.652902 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6c8xk"] Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.846253 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l6gfn"] Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.881916 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.882999 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.886644 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.886841 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.886933 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.887019 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-vrnd4"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.887707 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.894442 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.894632 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.916205 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998564 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998647 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998678 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998702 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998721 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998741 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d753cd5e-e85d-424c-a439-2b51cbedf76f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998761 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998788 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998812 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d753cd5e-e85d-424c-a439-2b51cbedf76f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998832 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfqwp\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-kube-api-access-qfqwp\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:15 crc kubenswrapper[4605]: I1001 14:00:15.998864 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-config-data\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100563 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100651 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100701 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100733 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100760 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100784 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d753cd5e-e85d-424c-a439-2b51cbedf76f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100807 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100847 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100881 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d753cd5e-e85d-424c-a439-2b51cbedf76f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100902 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfqwp\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-kube-api-access-qfqwp\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.100930 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-config-data\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.101363 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.102074 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-config-data\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.103050 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.104145 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.104247 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.104585 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.113411 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d753cd5e-e85d-424c-a439-2b51cbedf76f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.119382 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfqwp\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-kube-api-access-qfqwp\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.119912 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d753cd5e-e85d-424c-a439-2b51cbedf76f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.127828 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.128541 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.128915 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " pod="openstack/rabbitmq-server-0"
Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.212295 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.213578 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.219537 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-2w8nd" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.219700 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.219843 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.220042 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.220132 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.220414 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.229039 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.230603 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.239252 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.260236 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" event={"ID":"72ac5f32-fb05-4196-ae15-a49ac651b84c","Type":"ContainerStarted","Data":"71a19a6ff39b74e2276932c8b67b9e969565f33ef15223733633ddc17f6d0144"} Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.260288 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" event={"ID":"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1","Type":"ContainerStarted","Data":"770e1711c5bed729719a1a7e2b4a4ec3efb0ea731c5ccb5fd4b198b0da59ece9"} Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.311901 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18fdf98a-ad5b-4930-b8cc-2422242aac16-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.311964 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312004 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312033 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312049 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312077 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312116 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18fdf98a-ad5b-4930-b8cc-2422242aac16-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312146 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx5xn\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-kube-api-access-dx5xn\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312167 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312182 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.312209 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413443 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413502 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413521 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413547 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413567 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18fdf98a-ad5b-4930-b8cc-2422242aac16-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413585 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx5xn\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-kube-api-access-dx5xn\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413608 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413627 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413654 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413679 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18fdf98a-ad5b-4930-b8cc-2422242aac16-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.413696 
4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.416378 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.416717 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.417230 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.418005 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.418988 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.421249 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.421915 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.422500 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.425652 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18fdf98a-ad5b-4930-b8cc-2422242aac16-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.430952 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18fdf98a-ad5b-4930-b8cc-2422242aac16-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.432687 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx5xn\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-kube-api-access-dx5xn\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.446057 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.552487 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:00:16 crc kubenswrapper[4605]: W1001 14:00:16.917453 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd753cd5e_e85d_424c_a439_2b51cbedf76f.slice/crio-0f0186c30fd5f9c10d53cd3308c5d4b358d554720efb502d06501244c360c800 WatchSource:0}: Error finding container 0f0186c30fd5f9c10d53cd3308c5d4b358d554720efb502d06501244c360c800: Status 404 returned error can't find the container with id 0f0186c30fd5f9c10d53cd3308c5d4b358d554720efb502d06501244c360c800 Oct 01 14:00:16 crc kubenswrapper[4605]: I1001 14:00:16.918044 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:00:17 crc kubenswrapper[4605]: I1001 14:00:17.192529 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:00:17 crc kubenswrapper[4605]: I1001 14:00:17.295048 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d753cd5e-e85d-424c-a439-2b51cbedf76f","Type":"ContainerStarted","Data":"0f0186c30fd5f9c10d53cd3308c5d4b358d554720efb502d06501244c360c800"} Oct 01 14:00:17 crc kubenswrapper[4605]: I1001 14:00:17.300750 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"18fdf98a-ad5b-4930-b8cc-2422242aac16","Type":"ContainerStarted","Data":"803101d80f0b191f7f7ec7b373785c442756cc9eeee4856ddf5b94aa5baea3e3"} Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.726489 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.744249 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.756355 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.764149 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-psffg"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.767661 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.786346 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.787574 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.792385 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.809143 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.815675 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.836149 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.838126 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-9nblc"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.849701 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.853398 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.851583 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.861050 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.893851 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.893895 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-config-data-default\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.893917 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894523 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894558 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894574 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c414b65e-0cce-4d58-aa5d-08d0679595cd-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894593 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pvcl\" (UniqueName: \"kubernetes.io/projected/c414b65e-0cce-4d58-aa5d-08d0679595cd-kube-api-access-5pvcl\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894611 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894629 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894657 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-secrets\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894680 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894693 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894712 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-kolla-config\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894730 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2dfv\" (UniqueName: \"kubernetes.io/projected/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-kube-api-access-q2dfv\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894828 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.894859 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.895021 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.895050 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996583 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996653 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-secrets\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996698 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996727 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-kolla-config\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996771 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2dfv\" (UniqueName: \"kubernetes.io/projected/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-kube-api-access-q2dfv\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996802 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996837 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996876 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996932 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996959 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.996982 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-config-data-default\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997024 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0"
Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997106 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0"
\"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997139 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997217 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c414b65e-0cce-4d58-aa5d-08d0679595cd-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997272 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997295 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:18 crc kubenswrapper[4605]: I1001 14:00:18.997334 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pvcl\" (UniqueName: \"kubernetes.io/projected/c414b65e-0cce-4d58-aa5d-08d0679595cd-kube-api-access-5pvcl\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.000124 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.000367 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.003756 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.004513 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") device 
mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.004763 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c414b65e-0cce-4d58-aa5d-08d0679595cd-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.004536 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-kolla-config\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.008802 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-config-data-default\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.008894 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.009491 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.011002 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.011468 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c414b65e-0cce-4d58-aa5d-08d0679595cd-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.011859 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.012477 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-secrets\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.016663 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.016929 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.028464 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pvcl\" (UniqueName: \"kubernetes.io/projected/c414b65e-0cce-4d58-aa5d-08d0679595cd-kube-api-access-5pvcl\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.028575 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c414b65e-0cce-4d58-aa5d-08d0679595cd-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.034440 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2dfv\" (UniqueName: \"kubernetes.io/projected/e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e-kube-api-access-q2dfv\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.037812 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c414b65e-0cce-4d58-aa5d-08d0679595cd\") " pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.042050 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e\") " pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.092472 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.166028 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.817578 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.818882 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.829339 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-txp56"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.829444 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.829521 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.843891 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.951250 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.966561 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe08201-447e-4697-95cb-dfdf59dfdbe9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.966884 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88sdf\" (UniqueName: \"kubernetes.io/projected/cfe08201-447e-4697-95cb-dfdf59dfdbe9-kube-api-access-88sdf\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.967142 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe08201-447e-4697-95cb-dfdf59dfdbe9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.967249 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cfe08201-447e-4697-95cb-dfdf59dfdbe9-kolla-config\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:19 crc kubenswrapper[4605]: I1001 14:00:19.967348 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfe08201-447e-4697-95cb-dfdf59dfdbe9-config-data\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.073934 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88sdf\" (UniqueName: \"kubernetes.io/projected/cfe08201-447e-4697-95cb-dfdf59dfdbe9-kube-api-access-88sdf\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.078841 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe08201-447e-4697-95cb-dfdf59dfdbe9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.078896 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cfe08201-447e-4697-95cb-dfdf59dfdbe9-kolla-config\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.078977 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfe08201-447e-4697-95cb-dfdf59dfdbe9-config-data\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.080325 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cfe08201-447e-4697-95cb-dfdf59dfdbe9-kolla-config\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.080452 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe08201-447e-4697-95cb-dfdf59dfdbe9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.085025 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfe08201-447e-4697-95cb-dfdf59dfdbe9-config-data\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.095487 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe08201-447e-4697-95cb-dfdf59dfdbe9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.097814 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88sdf\" (UniqueName: \"kubernetes.io/projected/cfe08201-447e-4697-95cb-dfdf59dfdbe9-kube-api-access-88sdf\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.108047 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe08201-447e-4697-95cb-dfdf59dfdbe9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"cfe08201-447e-4697-95cb-dfdf59dfdbe9\") " pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.154443 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.308179 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 01 14:00:20 crc kubenswrapper[4605]: W1001 14:00:20.348647 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc414b65e_0cce_4d58_aa5d_08d0679595cd.slice/crio-83a923b201e64498689c6a6d60ff344ee52e274411e68eac4b9debfb230a257e WatchSource:0}: Error finding container 83a923b201e64498689c6a6d60ff344ee52e274411e68eac4b9debfb230a257e: Status 404 returned error can't find the container with id 83a923b201e64498689c6a6d60ff344ee52e274411e68eac4b9debfb230a257e
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.382564 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c414b65e-0cce-4d58-aa5d-08d0679595cd","Type":"ContainerStarted","Data":"83a923b201e64498689c6a6d60ff344ee52e274411e68eac4b9debfb230a257e"}
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.385064 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e","Type":"ContainerStarted","Data":"7431bb4b17c9fee51be4965e6f4dbe44d0e3cca04776860c5ba4580483b9eef4"}
Oct 01 14:00:20 crc kubenswrapper[4605]: I1001 14:00:20.857524 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.453728 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.454903 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.458550 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-n86pc"
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.489353 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.619846 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcvhs\" (UniqueName: \"kubernetes.io/projected/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed-kube-api-access-mcvhs\") pod \"kube-state-metrics-0\" (UID: \"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed\") " pod="openstack/kube-state-metrics-0"
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.631644 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.631966 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.721378 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcvhs\" (UniqueName: \"kubernetes.io/projected/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed-kube-api-access-mcvhs\") pod \"kube-state-metrics-0\" (UID: \"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed\") " pod="openstack/kube-state-metrics-0"
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.748689 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcvhs\" (UniqueName: \"kubernetes.io/projected/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed-kube-api-access-mcvhs\") pod \"kube-state-metrics-0\" (UID: \"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed\") " pod="openstack/kube-state-metrics-0"
Oct 01 14:00:21 crc kubenswrapper[4605]: I1001 14:00:21.789885 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.569151 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jvb44"]
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.570348 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.577267 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vhwbf"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.577451 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.577252 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.608386 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jvb44"]
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.623976 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-csqtk"]
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.625453 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.691235 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-csqtk"]
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693595 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a37367ae-0e7b-4ad1-afb4-c48ca6282706-scripts\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693663 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-run\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693836 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-log-ovn\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693866 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37367ae-0e7b-4ad1-afb4-c48ca6282706-combined-ca-bundle\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693891 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-run-ovn\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693911 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a37367ae-0e7b-4ad1-afb4-c48ca6282706-ovn-controller-tls-certs\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.693948 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h8cv\" (UniqueName: \"kubernetes.io/projected/a37367ae-0e7b-4ad1-afb4-c48ca6282706-kube-api-access-8h8cv\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795430 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-lib\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795496 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-run-ovn\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795525 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-log\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795555 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a37367ae-0e7b-4ad1-afb4-c48ca6282706-ovn-controller-tls-certs\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795609 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-etc-ovs\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795631 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h8cv\" (UniqueName: \"kubernetes.io/projected/a37367ae-0e7b-4ad1-afb4-c48ca6282706-kube-api-access-8h8cv\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795659 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzfjt\" (UniqueName: \"kubernetes.io/projected/9e9376a7-1282-4f2c-b437-bf6eb57d2739-kube-api-access-qzfjt\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795687 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e9376a7-1282-4f2c-b437-bf6eb57d2739-scripts\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk"
Oct 01
14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795722 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a37367ae-0e7b-4ad1-afb4-c48ca6282706-scripts\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795756 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-run\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795811 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-log-ovn\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795840 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-run\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.795875 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37367ae-0e7b-4ad1-afb4-c48ca6282706-combined-ca-bundle\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.798270 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-run-ovn\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.798600 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-run\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.798738 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a37367ae-0e7b-4ad1-afb4-c48ca6282706-var-log-ovn\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.800352 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a37367ae-0e7b-4ad1-afb4-c48ca6282706-scripts\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.813544 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a37367ae-0e7b-4ad1-afb4-c48ca6282706-ovn-controller-tls-certs\") pod 
\"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.821469 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37367ae-0e7b-4ad1-afb4-c48ca6282706-combined-ca-bundle\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.840157 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h8cv\" (UniqueName: \"kubernetes.io/projected/a37367ae-0e7b-4ad1-afb4-c48ca6282706-kube-api-access-8h8cv\") pod \"ovn-controller-jvb44\" (UID: \"a37367ae-0e7b-4ad1-afb4-c48ca6282706\") " pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.892654 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jvb44" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.898872 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-run\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.899064 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-lib\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.899085 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-log\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.899146 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-etc-ovs\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.899168 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzfjt\" (UniqueName: \"kubernetes.io/projected/9e9376a7-1282-4f2c-b437-bf6eb57d2739-kube-api-access-qzfjt\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.899184 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e9376a7-1282-4f2c-b437-bf6eb57d2739-scripts\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.900398 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-run\") pod \"ovn-controller-ovs-csqtk\" (UID: 
\"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.900471 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-lib\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.900504 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-var-log\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.900634 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/9e9376a7-1282-4f2c-b437-bf6eb57d2739-etc-ovs\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.901386 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e9376a7-1282-4f2c-b437-bf6eb57d2739-scripts\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.915209 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzfjt\" (UniqueName: \"kubernetes.io/projected/9e9376a7-1282-4f2c-b437-bf6eb57d2739-kube-api-access-qzfjt\") pod \"ovn-controller-ovs-csqtk\" (UID: \"9e9376a7-1282-4f2c-b437-bf6eb57d2739\") " pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:25 crc kubenswrapper[4605]: I1001 14:00:25.966262 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.435400 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.436759 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.439583 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.439774 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.440669 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-xdmq6" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.440838 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.444978 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.455483 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.611752 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-config\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.611824 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.611850 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.611924 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.611974 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz7zv\" (UniqueName: \"kubernetes.io/projected/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-kube-api-access-qz7zv\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.612003 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.612021 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.612140 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713606 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713664 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713701 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713740 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz7zv\" (UniqueName: \"kubernetes.io/projected/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-kube-api-access-qz7zv\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713770 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713790 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713816 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.713904 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-config\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.714347 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.714951 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-config\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.715187 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.719835 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.720524 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.720593 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.732612 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz7zv\" (UniqueName: \"kubernetes.io/projected/70acc9bd-54b2-4c70-bf3f-ce66a88bbd06-kube-api-access-qz7zv\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.746466 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06\") " pod="openstack/ovsdbserver-nb-0"
Oct 01 14:00:26 crc kubenswrapper[4605]: I1001 14:00:26.761788 4605 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.082920 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.084566 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.086317 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-jg9nk" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.087345 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.087357 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.087506 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.099670 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238615 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238678 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238742 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238801 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238834 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238858 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kc56\" (UniqueName: \"kubernetes.io/projected/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-kube-api-access-7kc56\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 
14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238907 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-config\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.238931 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.339906 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-config\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.339951 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340013 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340045 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340075 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340153 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340192 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340214 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kc56\" (UniqueName: \"kubernetes.io/projected/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-kube-api-access-7kc56\") pod 
\"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340899 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-config\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.340911 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.341519 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.342331 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.349130 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.360977 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.364530 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kc56\" (UniqueName: \"kubernetes.io/projected/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-kube-api-access-7kc56\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.365690 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3eaff33-3a5d-4868-ba47-a03e7ac13ab5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.376340 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5\") " pod="openstack/ovsdbserver-sb-0" Oct 01 14:00:28 crc kubenswrapper[4605]: I1001 14:00:28.411069 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Oct 01 14:00:30 crc kubenswrapper[4605]: W1001 14:00:30.422197 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfe08201_447e_4697_95cb_dfdf59dfdbe9.slice/crio-7dca412d2240bb3b17f7017375db70cbd5a7c430d112ce3ee4ef806cc26eb6be WatchSource:0}: Error finding container 7dca412d2240bb3b17f7017375db70cbd5a7c430d112ce3ee4ef806cc26eb6be: Status 404 returned error can't find the container with id 7dca412d2240bb3b17f7017375db70cbd5a7c430d112ce3ee4ef806cc26eb6be
Oct 01 14:00:30 crc kubenswrapper[4605]: I1001 14:00:30.425855 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 01 14:00:30 crc kubenswrapper[4605]: I1001 14:00:30.517846 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"cfe08201-447e-4697-95cb-dfdf59dfdbe9","Type":"ContainerStarted","Data":"7dca412d2240bb3b17f7017375db70cbd5a7c430d112ce3ee4ef806cc26eb6be"}
Oct 01 14:00:37 crc kubenswrapper[4605]: E1001 14:00:37.398731 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified"
Oct 01 14:00:37 crc kubenswrapper[4605]: E1001 14:00:37.399756 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qfqwp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(d753cd5e-e85d-424c-a439-2b51cbedf76f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 01 14:00:37 crc kubenswrapper[4605]: E1001 14:00:37.401066 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f"
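[The ErrImagePull dump above embeds the setup-container command as one flattened string. Re-wrapped below, one statement per line, with shell quoting adjusted for readability (content otherwise as logged): it installs the Erlang cookie, copies the enabled-plugins list, and derives .rabbitmqadmin.conf from default_user.conf before sleeping.

    sh -c 'cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie \
             && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; \
           cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; \
           echo "[default]" > /var/lib/rabbitmq/.rabbitmqadmin.conf \
             && sed -e "s/default_user/username/" -e "s/default_pass/password/" \
                  /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf \
             && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; \
           sleep 30'
]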
Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.287442 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.287814 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kctmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-l6gfn_openstack(72ac5f32-fb05-4196-ae15-a49ac651b84c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.288990 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" podUID="72ac5f32-fb05-4196-ae15-a49ac651b84c"
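[The dnsmasq init command from the dump above, one flag per line (content as logged; $(POD_IP) in the spec is Kubernetes environment substitution, rendered here as a shell variable). The trailing --test makes dnsmasq validate its configuration and exit, so the init container succeeds only when the generated config parses:

    dnsmasq --interface='*' \
            --conf-dir=/etc/dnsmasq.d \
            --hostsdir=/etc/dnsmasq.d/hosts \
            --keep-in-foreground \
            --log-debug \
            --bind-interfaces \
            --listen-address="$POD_IP" \
            --port 5353 \
            --log-facility=- \
            --no-hosts \
            --domain-needed \
            --no-resolv \
            --bogus-priv \
            --log-queries \
            --test
]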
Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.297199 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.297358 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2wgh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-6c8xk_openstack(e0d99f04-732e-4ed6-8d80-f2f4eeb723e1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.298632 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.580669 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" Oct 01 14:00:38 crc kubenswrapper[4605]: E1001 14:00:38.584185 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" podUID="72ac5f32-fb05-4196-ae15-a49ac651b84c" Oct 01 14:00:40 crc kubenswrapper[4605]: E1001 14:00:40.143842 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 01 14:00:40 crc kubenswrapper[4605]: E1001 14:00:40.144222 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kb8bg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-n4trn_openstack(3f3fa37c-0d11-441b-b5f7-aa027b731553): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:00:40 crc kubenswrapper[4605]: E1001 14:00:40.145792 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" podUID="3f3fa37c-0d11-441b-b5f7-aa027b731553" Oct 01 14:00:40 crc kubenswrapper[4605]: E1001 14:00:40.467848 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 01 14:00:40 crc kubenswrapper[4605]: E1001 14:00:40.467997 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jcwn9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-h8z8s_openstack(948aa060-96bc-43b1-8321-40354d477c98): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 01 14:00:40 crc kubenswrapper[4605]: E1001 14:00:40.469999 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" podUID="948aa060-96bc-43b1-8321-40354d477c98"
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.830887 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s"
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.844537 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn"
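[The util.go:48 entries above mean the kubelet sees no ready sandbox for the backing-off dnsmasq pods and will create fresh ones. A debugging sketch for correlating these messages with the runtime's view (assumes crictl is available alongside CRI-O on this host):

    # Sandboxes in NotReady state here correspond to the
    # "No ready sandbox for pod can be found" log messages.
    crictl pods --namespace openstack --name dnsmasq-dns -o table
]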
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.973703 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f3fa37c-0d11-441b-b5f7-aa027b731553-config\") pod \"3f3fa37c-0d11-441b-b5f7-aa027b731553\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") "
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.973781 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-dns-svc\") pod \"948aa060-96bc-43b1-8321-40354d477c98\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") "
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.973814 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcwn9\" (UniqueName: \"kubernetes.io/projected/948aa060-96bc-43b1-8321-40354d477c98-kube-api-access-jcwn9\") pod \"948aa060-96bc-43b1-8321-40354d477c98\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") "
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.973841 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb8bg\" (UniqueName: \"kubernetes.io/projected/3f3fa37c-0d11-441b-b5f7-aa027b731553-kube-api-access-kb8bg\") pod \"3f3fa37c-0d11-441b-b5f7-aa027b731553\" (UID: \"3f3fa37c-0d11-441b-b5f7-aa027b731553\") "
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.973885 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-config\") pod \"948aa060-96bc-43b1-8321-40354d477c98\" (UID: \"948aa060-96bc-43b1-8321-40354d477c98\") "
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.974432 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f3fa37c-0d11-441b-b5f7-aa027b731553-config" (OuterVolumeSpecName: "config") pod "3f3fa37c-0d11-441b-b5f7-aa027b731553" (UID: "3f3fa37c-0d11-441b-b5f7-aa027b731553"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.974491 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-config" (OuterVolumeSpecName: "config") pod "948aa060-96bc-43b1-8321-40354d477c98" (UID: "948aa060-96bc-43b1-8321-40354d477c98"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.975064 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "948aa060-96bc-43b1-8321-40354d477c98" (UID: "948aa060-96bc-43b1-8321-40354d477c98"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:00:41 crc kubenswrapper[4605]: I1001 14:00:41.988818 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948aa060-96bc-43b1-8321-40354d477c98-kube-api-access-jcwn9" (OuterVolumeSpecName: "kube-api-access-jcwn9") pod "948aa060-96bc-43b1-8321-40354d477c98" (UID: "948aa060-96bc-43b1-8321-40354d477c98"). InnerVolumeSpecName "kube-api-access-jcwn9".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.003452 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f3fa37c-0d11-441b-b5f7-aa027b731553-kube-api-access-kb8bg" (OuterVolumeSpecName: "kube-api-access-kb8bg") pod "3f3fa37c-0d11-441b-b5f7-aa027b731553" (UID: "3f3fa37c-0d11-441b-b5f7-aa027b731553"). InnerVolumeSpecName "kube-api-access-kb8bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.075191 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f3fa37c-0d11-441b-b5f7-aa027b731553-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.075214 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.075227 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcwn9\" (UniqueName: \"kubernetes.io/projected/948aa060-96bc-43b1-8321-40354d477c98-kube-api-access-jcwn9\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.075240 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb8bg\" (UniqueName: \"kubernetes.io/projected/3f3fa37c-0d11-441b-b5f7-aa027b731553-kube-api-access-kb8bg\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.075249 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948aa060-96bc-43b1-8321-40354d477c98-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.263770 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jvb44"] Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.400711 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.611703 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed","Type":"ContainerStarted","Data":"248e947d7551ec9bb605c4965c1a5db19a754bb3e3c076a6302fee08cafd4c8e"} Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.612760 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" event={"ID":"3f3fa37c-0d11-441b-b5f7-aa027b731553","Type":"ContainerDied","Data":"86a9d3d98f06bd1dd13ab23f39238d964051b731f873eed6c5e53ea8943ee735"} Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.612836 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-n4trn" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.614176 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jvb44" event={"ID":"a37367ae-0e7b-4ad1-afb4-c48ca6282706","Type":"ContainerStarted","Data":"a12fa94295733fa0fabdc8e61eee52382f2d5bedc7a5a9bc86a096260cd1d6ab"} Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.615288 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" event={"ID":"948aa060-96bc-43b1-8321-40354d477c98","Type":"ContainerDied","Data":"9e1952131784d1f83ea5c2cc836899cc3144905541ed85d28a22cabedbb2190a"} Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.615375 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-h8z8s" Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.617622 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e","Type":"ContainerStarted","Data":"f0d3db2f0dd3ed00fa86cad2f4abb938b578c0bdb34738ad4f544775a7e054f3"} Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.747953 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h8z8s"] Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.756395 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-h8z8s"] Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.805767 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4trn"] Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.819363 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-n4trn"] Oct 01 14:00:42 crc kubenswrapper[4605]: I1001 14:00:42.940774 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.473924 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.566650 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-csqtk"] Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.635769 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5","Type":"ContainerStarted","Data":"424215583b21cff305b6e5e971792a8539c466b615d2984509852dfa7b7985dc"} Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.638735 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d753cd5e-e85d-424c-a439-2b51cbedf76f","Type":"ContainerStarted","Data":"20882a754651de043ae04d6134122f8552e8704cf3c71bf28f4e3b8a9f4daab2"} Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.640541 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"18fdf98a-ad5b-4930-b8cc-2422242aac16","Type":"ContainerStarted","Data":"53e43957de2357c15334e32f90fbcc4bc89714062d29d00f8c8a8718dccf0c53"} Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.644038 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"cfe08201-447e-4697-95cb-dfdf59dfdbe9","Type":"ContainerStarted","Data":"4b7462aee4ac4f3669f10068933f394ee0c47a5378c3dde46a6b0f95af74dce1"} Oct 01 14:00:43 crc 
kubenswrapper[4605]: I1001 14:00:43.644126 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.646047 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c414b65e-0cce-4d58-aa5d-08d0679595cd","Type":"ContainerStarted","Data":"bf6af5ad2237e45a59a6ab3c0f3baf7a6fb42e762fe1b7b2d311da95545ca93d"} Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.647214 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csqtk" event={"ID":"9e9376a7-1282-4f2c-b437-bf6eb57d2739","Type":"ContainerStarted","Data":"21ed6e2d1ea340320ec79ee8b23224524a0d2f3eb30a98242056aebb5bdde5d3"} Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.649395 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06","Type":"ContainerStarted","Data":"566ab6d18d9f587d63abbf30983ddc96a8c9da0a1b3b63be9e4e29b271a37a77"} Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.697893 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=13.296031416 podStartE2EDuration="24.697867935s" podCreationTimestamp="2025-10-01 14:00:19 +0000 UTC" firstStartedPulling="2025-10-01 14:00:30.425644848 +0000 UTC m=+953.169621056" lastFinishedPulling="2025-10-01 14:00:41.827481367 +0000 UTC m=+964.571457575" observedRunningTime="2025-10-01 14:00:43.695847644 +0000 UTC m=+966.439823852" watchObservedRunningTime="2025-10-01 14:00:43.697867935 +0000 UTC m=+966.441844153" Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.941897 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f3fa37c-0d11-441b-b5f7-aa027b731553" path="/var/lib/kubelet/pods/3f3fa37c-0d11-441b-b5f7-aa027b731553/volumes" Oct 01 14:00:43 crc kubenswrapper[4605]: I1001 14:00:43.943791 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948aa060-96bc-43b1-8321-40354d477c98" path="/var/lib/kubelet/pods/948aa060-96bc-43b1-8321-40354d477c98/volumes" Oct 01 14:00:50 crc kubenswrapper[4605]: E1001 14:00:50.023661 4605 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc414b65e_0cce_4d58_aa5d_08d0679595cd.slice/crio-bf6af5ad2237e45a59a6ab3c0f3baf7a6fb42e762fe1b7b2d311da95545ca93d.scope\": RecentStats: unable to find data in memory cache]" Oct 01 14:00:50 crc kubenswrapper[4605]: I1001 14:00:50.160600 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 01 14:00:50 crc kubenswrapper[4605]: I1001 14:00:50.720784 4605 generic.go:334] "Generic (PLEG): container finished" podID="c414b65e-0cce-4d58-aa5d-08d0679595cd" containerID="bf6af5ad2237e45a59a6ab3c0f3baf7a6fb42e762fe1b7b2d311da95545ca93d" exitCode=0 Oct 01 14:00:50 crc kubenswrapper[4605]: I1001 14:00:50.720875 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c414b65e-0cce-4d58-aa5d-08d0679595cd","Type":"ContainerDied","Data":"bf6af5ad2237e45a59a6ab3c0f3baf7a6fb42e762fe1b7b2d311da95545ca93d"} Oct 01 14:00:50 crc kubenswrapper[4605]: I1001 14:00:50.722512 4605 generic.go:334] "Generic (PLEG): container finished" podID="e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e" 
containerID="f0d3db2f0dd3ed00fa86cad2f4abb938b578c0bdb34738ad4f544775a7e054f3" exitCode=0 Oct 01 14:00:50 crc kubenswrapper[4605]: I1001 14:00:50.722536 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e","Type":"ContainerDied","Data":"f0d3db2f0dd3ed00fa86cad2f4abb938b578c0bdb34738ad4f544775a7e054f3"} Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.631316 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.631366 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.631413 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.730503 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f69fd4c8b7ea593079cde275a0913d46f3db4c2d1ad72f22e5ac983a6cab564"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.730581 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://1f69fd4c8b7ea593079cde275a0913d46f3db4c2d1ad72f22e5ac983a6cab564" gracePeriod=600 Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.849237 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l6gfn"] Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.906887 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-2vm8x"] Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.908217 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:51 crc kubenswrapper[4605]: I1001 14:00:51.918429 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-2vm8x"] Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.031794 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-config\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.032941 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.033576 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqcf2\" (UniqueName: \"kubernetes.io/projected/479f5510-ba1f-46e2-86d3-39e660975097-kube-api-access-lqcf2\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.137466 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqcf2\" (UniqueName: \"kubernetes.io/projected/479f5510-ba1f-46e2-86d3-39e660975097-kube-api-access-lqcf2\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.137541 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-config\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.137608 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.138421 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.139118 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-config\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.158179 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqcf2\" (UniqueName: 
\"kubernetes.io/projected/479f5510-ba1f-46e2-86d3-39e660975097-kube-api-access-lqcf2\") pod \"dnsmasq-dns-7cb5889db5-2vm8x\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:52 crc kubenswrapper[4605]: I1001 14:00:52.234297 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.155679 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.161386 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.164129 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.164294 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-kdx82" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.164503 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.170419 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.270053 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.272485 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7zcl\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-kube-api-access-d7zcl\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.272527 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.272580 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.272612 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fedbab19-fa82-4d92-b787-de85226cd34f-lock\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.272642 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fedbab19-fa82-4d92-b787-de85226cd34f-cache\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.374429 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.374496 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fedbab19-fa82-4d92-b787-de85226cd34f-lock\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.374539 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fedbab19-fa82-4d92-b787-de85226cd34f-cache\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.374630 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7zcl\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-kube-api-access-d7zcl\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.374655 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: E1001 14:00:53.374824 4605 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 14:00:53 crc kubenswrapper[4605]: E1001 14:00:53.374840 4605 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.374857 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: E1001 14:00:53.374892 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift podName:fedbab19-fa82-4d92-b787-de85226cd34f nodeName:}" failed. No retries permitted until 2025-10-01 14:00:53.874872404 +0000 UTC m=+976.618848612 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift") pod "swift-storage-0" (UID: "fedbab19-fa82-4d92-b787-de85226cd34f") : configmap "swift-ring-files" not found Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.375036 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fedbab19-fa82-4d92-b787-de85226cd34f-lock\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.375364 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fedbab19-fa82-4d92-b787-de85226cd34f-cache\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.396503 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7zcl\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-kube-api-access-d7zcl\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.399433 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.704570 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-x8zcm"] Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.705683 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.710429 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.710707 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.710820 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.717218 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-x8zcm"] Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.781900 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-scripts\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.781943 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kktx\" (UniqueName: \"kubernetes.io/projected/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-kube-api-access-7kktx\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.781974 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-etc-swift\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.781988 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-ring-data-devices\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.782046 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-combined-ca-bundle\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.782219 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-swiftconf\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.782256 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-dispersionconf\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 
14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883577 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-swiftconf\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883623 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-dispersionconf\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883654 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-scripts\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883674 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kktx\" (UniqueName: \"kubernetes.io/projected/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-kube-api-access-7kktx\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883697 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-etc-swift\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883714 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-ring-data-devices\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883736 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.883780 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-combined-ca-bundle\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.884127 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-etc-swift\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: E1001 14:00:53.884388 4605 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap 
"swift-ring-files" not found Oct 01 14:00:53 crc kubenswrapper[4605]: E1001 14:00:53.884488 4605 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 14:00:53 crc kubenswrapper[4605]: E1001 14:00:53.884617 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift podName:fedbab19-fa82-4d92-b787-de85226cd34f nodeName:}" failed. No retries permitted until 2025-10-01 14:00:54.884594757 +0000 UTC m=+977.628570975 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift") pod "swift-storage-0" (UID: "fedbab19-fa82-4d92-b787-de85226cd34f") : configmap "swift-ring-files" not found Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.884770 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-ring-data-devices\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.884854 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-scripts\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.886794 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-combined-ca-bundle\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.894335 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-dispersionconf\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.909955 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kktx\" (UniqueName: \"kubernetes.io/projected/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-kube-api-access-7kktx\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:53 crc kubenswrapper[4605]: I1001 14:00:53.911051 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-swiftconf\") pod \"swift-ring-rebalance-x8zcm\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:54 crc kubenswrapper[4605]: I1001 14:00:54.025447 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:00:54 crc kubenswrapper[4605]: I1001 14:00:54.766156 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="1f69fd4c8b7ea593079cde275a0913d46f3db4c2d1ad72f22e5ac983a6cab564" exitCode=0 Oct 01 14:00:54 crc kubenswrapper[4605]: I1001 14:00:54.766193 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"1f69fd4c8b7ea593079cde275a0913d46f3db4c2d1ad72f22e5ac983a6cab564"} Oct 01 14:00:54 crc kubenswrapper[4605]: I1001 14:00:54.766224 4605 scope.go:117] "RemoveContainer" containerID="57b43180ba9a3ef7b3e3cb1260253e07ed74787366be1e64f3a3708a8ee8ce49" Oct 01 14:00:54 crc kubenswrapper[4605]: I1001 14:00:54.880567 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:54 crc kubenswrapper[4605]: I1001 14:00:54.902927 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:54 crc kubenswrapper[4605]: E1001 14:00:54.903198 4605 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 14:00:54 crc kubenswrapper[4605]: E1001 14:00:54.903216 4605 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 14:00:54 crc kubenswrapper[4605]: E1001 14:00:54.903257 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift podName:fedbab19-fa82-4d92-b787-de85226cd34f nodeName:}" failed. No retries permitted until 2025-10-01 14:00:56.903243583 +0000 UTC m=+979.647219791 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift") pod "swift-storage-0" (UID: "fedbab19-fa82-4d92-b787-de85226cd34f") : configmap "swift-ring-files" not found Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.003566 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kctmk\" (UniqueName: \"kubernetes.io/projected/72ac5f32-fb05-4196-ae15-a49ac651b84c-kube-api-access-kctmk\") pod \"72ac5f32-fb05-4196-ae15-a49ac651b84c\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.003953 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-config\") pod \"72ac5f32-fb05-4196-ae15-a49ac651b84c\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.004036 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-dns-svc\") pod \"72ac5f32-fb05-4196-ae15-a49ac651b84c\" (UID: \"72ac5f32-fb05-4196-ae15-a49ac651b84c\") " Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.004462 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-config" (OuterVolumeSpecName: "config") pod "72ac5f32-fb05-4196-ae15-a49ac651b84c" (UID: "72ac5f32-fb05-4196-ae15-a49ac651b84c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.004868 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72ac5f32-fb05-4196-ae15-a49ac651b84c" (UID: "72ac5f32-fb05-4196-ae15-a49ac651b84c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.006445 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72ac5f32-fb05-4196-ae15-a49ac651b84c-kube-api-access-kctmk" (OuterVolumeSpecName: "kube-api-access-kctmk") pod "72ac5f32-fb05-4196-ae15-a49ac651b84c" (UID: "72ac5f32-fb05-4196-ae15-a49ac651b84c"). InnerVolumeSpecName "kube-api-access-kctmk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.057279 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-2vm8x"] Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.107240 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.107267 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72ac5f32-fb05-4196-ae15-a49ac651b84c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.107276 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kctmk\" (UniqueName: \"kubernetes.io/projected/72ac5f32-fb05-4196-ae15-a49ac651b84c-kube-api-access-kctmk\") on node \"crc\" DevicePath \"\"" Oct 01 14:00:55 crc kubenswrapper[4605]: W1001 14:00:55.115778 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod479f5510_ba1f_46e2_86d3_39e660975097.slice/crio-ae4ab55c3f603588f5520a248da8c68d208205ff83f92f937741ecb151262957 WatchSource:0}: Error finding container ae4ab55c3f603588f5520a248da8c68d208205ff83f92f937741ecb151262957: Status 404 returned error can't find the container with id ae4ab55c3f603588f5520a248da8c68d208205ff83f92f937741ecb151262957 Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.421694 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-x8zcm"] Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.778982 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"26228f282f385d65bcc8a30f3ba1b4954e3d59ec9adad591dd318d09c86924ce"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.781792 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e","Type":"ContainerStarted","Data":"be00b26aa6bdc30ca78797aaba1bd9a2b3731255cd95b74ce224021589fa0b50"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.786268 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-x8zcm" event={"ID":"4c3df3b9-829b-4ebb-9593-487b1f6ddce1","Type":"ContainerStarted","Data":"97ec14ed044d1342826efacfa09091d27a4c4b59c353f8f30449b02077c962bd"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.787757 4605 generic.go:334] "Generic (PLEG): container finished" podID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" containerID="627eeff6d8baf6c1be93ded39fd85eabf17a6af3e4f0e503f0374860cb73252e" exitCode=0 Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.787801 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" event={"ID":"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1","Type":"ContainerDied","Data":"627eeff6d8baf6c1be93ded39fd85eabf17a6af3e4f0e503f0374860cb73252e"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.797023 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csqtk" event={"ID":"9e9376a7-1282-4f2c-b437-bf6eb57d2739","Type":"ContainerStarted","Data":"ee2180154ac59edf28b3e31136575c9101fea430f68fd2ebb95a9d08bbbb7b7b"} Oct 01 
14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.815630 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06","Type":"ContainerStarted","Data":"79f85dcb3542a28d150227ece0731ba5d4fde3a1d2b836c662d9dcda5f193e2d"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.822556 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5","Type":"ContainerStarted","Data":"beaced01a7128d109fc99105af0a0e32e8482784a63bac2d6d5c86fc8d9898b1"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.823601 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" event={"ID":"72ac5f32-fb05-4196-ae15-a49ac651b84c","Type":"ContainerDied","Data":"71a19a6ff39b74e2276932c8b67b9e969565f33ef15223733633ddc17f6d0144"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.823669 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l6gfn" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.851508 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c414b65e-0cce-4d58-aa5d-08d0679595cd","Type":"ContainerStarted","Data":"42a44a912058816bca51af989ea273aa0d0de0fa1ba3c897fe50559923fb743b"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.858952 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed","Type":"ContainerStarted","Data":"c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.859698 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.868510 4605 generic.go:334] "Generic (PLEG): container finished" podID="479f5510-ba1f-46e2-86d3-39e660975097" containerID="0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4" exitCode=0 Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.868676 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" event={"ID":"479f5510-ba1f-46e2-86d3-39e660975097","Type":"ContainerDied","Data":"0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.868773 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" event={"ID":"479f5510-ba1f-46e2-86d3-39e660975097","Type":"ContainerStarted","Data":"ae4ab55c3f603588f5520a248da8c68d208205ff83f92f937741ecb151262957"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.879223 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jvb44" event={"ID":"a37367ae-0e7b-4ad1-afb4-c48ca6282706","Type":"ContainerStarted","Data":"c5c415e9b06e4936ecd7e2ff84a35140e59875f8137c87596c4a01ab246acc34"} Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.880127 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jvb44" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.941938 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.061288615 podStartE2EDuration="38.941882776s" podCreationTimestamp="2025-10-01 14:00:17 +0000 UTC" 
firstStartedPulling="2025-10-01 14:00:20.032712355 +0000 UTC m=+942.776688563" lastFinishedPulling="2025-10-01 14:00:41.913306516 +0000 UTC m=+964.657282724" observedRunningTime="2025-10-01 14:00:55.909191447 +0000 UTC m=+978.653167675" watchObservedRunningTime="2025-10-01 14:00:55.941882776 +0000 UTC m=+978.685858984" Oct 01 14:00:55 crc kubenswrapper[4605]: I1001 14:00:55.965322 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=17.492699618 podStartE2EDuration="38.96530352s" podCreationTimestamp="2025-10-01 14:00:17 +0000 UTC" firstStartedPulling="2025-10-01 14:00:20.352470024 +0000 UTC m=+943.096446232" lastFinishedPulling="2025-10-01 14:00:41.825073926 +0000 UTC m=+964.569050134" observedRunningTime="2025-10-01 14:00:55.960126539 +0000 UTC m=+978.704102757" watchObservedRunningTime="2025-10-01 14:00:55.96530352 +0000 UTC m=+978.709279728" Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.010554 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jvb44" podStartSLOduration=18.471303834 podStartE2EDuration="31.010536058s" podCreationTimestamp="2025-10-01 14:00:25 +0000 UTC" firstStartedPulling="2025-10-01 14:00:42.361994078 +0000 UTC m=+965.105970286" lastFinishedPulling="2025-10-01 14:00:54.901226302 +0000 UTC m=+977.645202510" observedRunningTime="2025-10-01 14:00:55.994812259 +0000 UTC m=+978.738788467" watchObservedRunningTime="2025-10-01 14:00:56.010536058 +0000 UTC m=+978.754512266" Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.016984 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=22.496015844 podStartE2EDuration="35.016949591s" podCreationTimestamp="2025-10-01 14:00:21 +0000 UTC" firstStartedPulling="2025-10-01 14:00:42.463706801 +0000 UTC m=+965.207683009" lastFinishedPulling="2025-10-01 14:00:54.984640278 +0000 UTC m=+977.728616756" observedRunningTime="2025-10-01 14:00:56.006318331 +0000 UTC m=+978.750294539" watchObservedRunningTime="2025-10-01 14:00:56.016949591 +0000 UTC m=+978.760925799" Oct 01 14:00:56 crc kubenswrapper[4605]: E1001 14:00:56.054147 4605 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Oct 01 14:00:56 crc kubenswrapper[4605]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 01 14:00:56 crc kubenswrapper[4605]: > podSandboxID="770e1711c5bed729719a1a7e2b4a4ec3efb0ea731c5ccb5fd4b198b0da59ece9" Oct 01 14:00:56 crc kubenswrapper[4605]: E1001 14:00:56.054342 4605 kuberuntime_manager.go:1274] "Unhandled Error" err=< Oct 01 14:00:56 crc kubenswrapper[4605]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2wgh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-6c8xk_openstack(e0d99f04-732e-4ed6-8d80-f2f4eeb723e1): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 01 14:00:56 crc kubenswrapper[4605]: > logger="UnhandledError" Oct 01 14:00:56 crc kubenswrapper[4605]: E1001 14:00:56.061700 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.078151 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l6gfn"] Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.084550 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l6gfn"] Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.888191 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" 
event={"ID":"479f5510-ba1f-46e2-86d3-39e660975097","Type":"ContainerStarted","Data":"1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530"} Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.889240 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.892107 4605 generic.go:334] "Generic (PLEG): container finished" podID="9e9376a7-1282-4f2c-b437-bf6eb57d2739" containerID="ee2180154ac59edf28b3e31136575c9101fea430f68fd2ebb95a9d08bbbb7b7b" exitCode=0 Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.892231 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csqtk" event={"ID":"9e9376a7-1282-4f2c-b437-bf6eb57d2739","Type":"ContainerDied","Data":"ee2180154ac59edf28b3e31136575c9101fea430f68fd2ebb95a9d08bbbb7b7b"} Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.919059 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" podStartSLOduration=5.9190368190000004 podStartE2EDuration="5.919036819s" podCreationTimestamp="2025-10-01 14:00:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:00:56.903016273 +0000 UTC m=+979.646992481" watchObservedRunningTime="2025-10-01 14:00:56.919036819 +0000 UTC m=+979.663013027" Oct 01 14:00:56 crc kubenswrapper[4605]: I1001 14:00:56.962986 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:00:56 crc kubenswrapper[4605]: E1001 14:00:56.967171 4605 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 14:00:56 crc kubenswrapper[4605]: E1001 14:00:56.967343 4605 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 14:00:56 crc kubenswrapper[4605]: E1001 14:00:56.967466 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift podName:fedbab19-fa82-4d92-b787-de85226cd34f nodeName:}" failed. No retries permitted until 2025-10-01 14:01:00.967444618 +0000 UTC m=+983.711420896 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift") pod "swift-storage-0" (UID: "fedbab19-fa82-4d92-b787-de85226cd34f") : configmap "swift-ring-files" not found Oct 01 14:00:57 crc kubenswrapper[4605]: I1001 14:00:57.900922 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csqtk" event={"ID":"9e9376a7-1282-4f2c-b437-bf6eb57d2739","Type":"ContainerStarted","Data":"2ce336e2a90b4c5f8293744b15a65c23fd8aaa1a089c5d4bc19ac87db633ccd2"} Oct 01 14:00:57 crc kubenswrapper[4605]: I1001 14:00:57.938581 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72ac5f32-fb05-4196-ae15-a49ac651b84c" path="/var/lib/kubelet/pods/72ac5f32-fb05-4196-ae15-a49ac651b84c/volumes" Oct 01 14:00:59 crc kubenswrapper[4605]: I1001 14:00:59.093999 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 01 14:00:59 crc kubenswrapper[4605]: I1001 14:00:59.094281 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 01 14:00:59 crc kubenswrapper[4605]: I1001 14:00:59.167438 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 01 14:00:59 crc kubenswrapper[4605]: I1001 14:00:59.167486 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.030777 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:01:01 crc kubenswrapper[4605]: E1001 14:01:01.030954 4605 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 01 14:01:01 crc kubenswrapper[4605]: E1001 14:01:01.031447 4605 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 01 14:01:01 crc kubenswrapper[4605]: E1001 14:01:01.031509 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift podName:fedbab19-fa82-4d92-b787-de85226cd34f nodeName:}" failed. No retries permitted until 2025-10-01 14:01:09.031489062 +0000 UTC m=+991.775465270 (durationBeforeRetry 8s). 
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.147717 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.212781 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.796197 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.940730 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"70acc9bd-54b2-4c70-bf3f-ce66a88bbd06","Type":"ContainerStarted","Data":"af1fc4e8caa5de6f6d4a23526538e7ea39c498da3bce05678be88316e87a6282"}
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.942653 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e3eaff33-3a5d-4868-ba47-a03e7ac13ab5","Type":"ContainerStarted","Data":"de5e980f8c8fae7032fab0f53d80e17dfbc802c0c3e5f9255b449050d9080ec1"}
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.944351 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-x8zcm" event={"ID":"4c3df3b9-829b-4ebb-9593-487b1f6ddce1","Type":"ContainerStarted","Data":"10262fe6c5244d7199a82329e4f9003b4be448295e97eee8fea9a9f656c66597"}
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.955729 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-csqtk" event={"ID":"9e9376a7-1282-4f2c-b437-bf6eb57d2739","Type":"ContainerStarted","Data":"4ef097233254705d013db7d980706a70de0349479bb0c3d1d1c70e69bfbc8651"}
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.955968 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.956040 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-csqtk"
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.979004 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=18.659067801 podStartE2EDuration="36.978983133s" podCreationTimestamp="2025-10-01 14:00:25 +0000 UTC" firstStartedPulling="2025-10-01 14:00:43.078258554 +0000 UTC m=+965.822234762" lastFinishedPulling="2025-10-01 14:01:01.398173886 +0000 UTC m=+984.142150094" observedRunningTime="2025-10-01 14:01:01.962051754 +0000 UTC m=+984.706027962" watchObservedRunningTime="2025-10-01 14:01:01.978983133 +0000 UTC m=+984.722959341"
Oct 01 14:01:01 crc kubenswrapper[4605]: I1001 14:01:01.984785 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=17.057409404 podStartE2EDuration="34.98476926s" podCreationTimestamp="2025-10-01 14:00:27 +0000 UTC" firstStartedPulling="2025-10-01 14:00:43.48420048 +0000 UTC m=+966.228176678" lastFinishedPulling="2025-10-01 14:01:01.411560326 +0000 UTC m=+984.155536534" observedRunningTime="2025-10-01 14:01:01.979976818 +0000 UTC m=+984.723953026" watchObservedRunningTime="2025-10-01 14:01:01.98476926 +0000 UTC m=+984.728745468"
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.005623 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-x8zcm" podStartSLOduration=3.073725822 podStartE2EDuration="9.005608109s" podCreationTimestamp="2025-10-01 14:00:53 +0000 UTC" firstStartedPulling="2025-10-01 14:00:55.464937035 +0000 UTC m=+978.208913243" lastFinishedPulling="2025-10-01 14:01:01.396819322 +0000 UTC m=+984.140795530" observedRunningTime="2025-10-01 14:01:02.004360097 +0000 UTC m=+984.748336295" watchObservedRunningTime="2025-10-01 14:01:02.005608109 +0000 UTC m=+984.749584317"
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.033108 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-csqtk" podStartSLOduration=25.882918628 podStartE2EDuration="37.033076856s" podCreationTimestamp="2025-10-01 14:00:25 +0000 UTC" firstStartedPulling="2025-10-01 14:00:43.612396535 +0000 UTC m=+966.356372733" lastFinishedPulling="2025-10-01 14:00:54.762554743 +0000 UTC m=+977.506530961" observedRunningTime="2025-10-01 14:01:02.025875033 +0000 UTC m=+984.769851241" watchObservedRunningTime="2025-10-01 14:01:02.033076856 +0000 UTC m=+984.777053054"
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.236373 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x"
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.288422 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6c8xk"]
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.644354 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk"
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.702332 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-config\") pod \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") "
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.702405 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-dns-svc\") pod \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") "
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.702499 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2wgh\" (UniqueName: \"kubernetes.io/projected/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-kube-api-access-v2wgh\") pod \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\" (UID: \"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1\") "
Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.709507 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-kube-api-access-v2wgh" (OuterVolumeSpecName: "kube-api-access-v2wgh") pod "e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" (UID: "e0d99f04-732e-4ed6-8d80-f2f4eeb723e1"). InnerVolumeSpecName "kube-api-access-v2wgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
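The pod_startup_latency_tracker records above relate as: podStartE2EDuration = watchObservedRunningTime - podCreationTimestamp, and podStartSLOduration = E2E minus the image-pull window (lastFinishedPulling - firstStartedPulling). A sketch that reproduces the ovsdbserver-nb-0 figures from the timestamps copied out of the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Timestamps copied from the ovsdbserver-nb-0 record above.
	created := parse("2025-10-01 14:00:25 +0000 UTC")
	firstPull := parse("2025-10-01 14:00:43.078258554 +0000 UTC")
	lastPull := parse("2025-10-01 14:01:01.398173886 +0000 UTC")
	observed := parse("2025-10-01 14:01:01.978983133 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration: 36.978983133s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: 18.659067801s
	fmt.Println(e2e, slo)
}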
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.739819 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-config" (OuterVolumeSpecName: "config") pod "e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" (UID: "e0d99f04-732e-4ed6-8d80-f2f4eeb723e1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.746065 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" (UID: "e0d99f04-732e-4ed6-8d80-f2f4eeb723e1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.762123 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.804176 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.804210 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2wgh\" (UniqueName: \"kubernetes.io/projected/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-kube-api-access-v2wgh\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.804223 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.809989 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.961968 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.961968 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-6c8xk" event={"ID":"e0d99f04-732e-4ed6-8d80-f2f4eeb723e1","Type":"ContainerDied","Data":"770e1711c5bed729719a1a7e2b4a4ec3efb0ea731c5ccb5fd4b198b0da59ece9"} Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.962031 4605 scope.go:117] "RemoveContainer" containerID="627eeff6d8baf6c1be93ded39fd85eabf17a6af3e4f0e503f0374860cb73252e" Oct 01 14:01:02 crc kubenswrapper[4605]: I1001 14:01:02.962747 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.022764 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6c8xk"] Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.032629 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6c8xk"] Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.213033 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.297466 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.412255 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.421792 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.506174 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-ntlsl"] Oct 01 14:01:03 crc kubenswrapper[4605]: E1001 14:01:03.506477 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" containerName="init" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.506491 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" containerName="init" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.506645 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" containerName="init" Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.507392 4605 util.go:30] "No sandbox for pod can be found. 
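The RemoveStaleState records above refer to a container named "init": the dnsmasq pods run an init container that must exit 0 (the ContainerDied events with exitCode=0) before the main container starts. A sketch of that pod shape, assuming k8s.io/api types; the images and the main container name are placeholders, only "init" comes from the log.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Init-container pattern implied by the records above: "init" must exit 0
	// before the main container starts. Images and the main container name
	// are placeholders; only the container name "init" appears in the log.
	pod := corev1.Pod{
		Spec: corev1.PodSpec{
			InitContainers: []corev1.Container{
				{Name: "init", Image: "example.invalid/dnsmasq-init:latest"},
			},
			Containers: []corev1.Container{
				{Name: "dnsmasq-dns", Image: "example.invalid/dnsmasq:latest"},
			},
		},
	}
	fmt.Printf("%s runs to completion before %s starts\n",
		pod.Spec.InitContainers[0].Name, pod.Spec.Containers[0].Name)
}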
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.528192 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.547940 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-ntlsl"]
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.617717 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.617768 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-config\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.617835 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbrfz\" (UniqueName: \"kubernetes.io/projected/184ee06f-8871-45cb-a77e-5ab6e740b756-kube-api-access-fbrfz\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.617930 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.719762 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.719811 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-config\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.719859 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbrfz\" (UniqueName: \"kubernetes.io/projected/184ee06f-8871-45cb-a77e-5ab6e740b756-kube-api-access-fbrfz\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.719940 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.720912 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.720965 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-config\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.721065 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.744206 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbrfz\" (UniqueName: \"kubernetes.io/projected/184ee06f-8871-45cb-a77e-5ab6e740b756-kube-api-access-fbrfz\") pod \"dnsmasq-dns-74f6f696b9-ntlsl\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") " pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.758843 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ppfrt"]
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.763143 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ppfrt"
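The reconcile sequence above (VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded) covers the two volume kinds logged for dnsmasq-dns-74f6f696b9-ntlsl: plain ConfigMap volumes and the projected service-account token volume kube-api-access-fbrfz. A sketch of how such volumes are declared, assuming k8s.io/api types; only the volume names are taken from the log, and the ConfigMap name for "config" is a placeholder.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// Volume declarations matching the mount sequence above: a plain ConfigMap
// volume and a projected service-account token volume.
func dnsmasqVolumes() []corev1.Volume {
	return []corev1.Volume{
		{
			Name: "config",
			VolumeSource: corev1.VolumeSource{
				ConfigMap: &corev1.ConfigMapVolumeSource{
					// The ConfigMap name is a placeholder; the log only shows
					// the volume name.
					LocalObjectReference: corev1.LocalObjectReference{Name: "dnsmasq-dns-config"},
				},
			},
		},
		{
			Name: "kube-api-access-fbrfz",
			VolumeSource: corev1.VolumeSource{
				Projected: &corev1.ProjectedVolumeSource{
					Sources: []corev1.VolumeProjection{{
						ServiceAccountToken: &corev1.ServiceAccountTokenProjection{Path: "token"},
					}},
				},
			},
		},
	}
}

func main() {
	for _, v := range dnsmasqVolumes() {
		fmt.Println(v.Name)
	}
}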
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.765444 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.773346 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ppfrt"]
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.822884 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-ovs-rundir\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.822968 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-ovn-rundir\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.823013 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzp7r\" (UniqueName: \"kubernetes.io/projected/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-kube-api-access-bzp7r\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.823037 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-config\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.823100 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-combined-ca-bundle\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.823126 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.828855 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.924318 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-combined-ca-bundle\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.924388 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.924441 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-ovs-rundir\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.924504 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-ovn-rundir\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.924564 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzp7r\" (UniqueName: \"kubernetes.io/projected/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-kube-api-access-bzp7r\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.924591 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-config\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.925755 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-config\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.925751 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-ovs-rundir\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.925852 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-ovn-rundir\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.932270 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.938773 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-combined-ca-bundle\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.946378 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzp7r\" (UniqueName: \"kubernetes.io/projected/2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4-kube-api-access-bzp7r\") pod \"ovn-controller-metrics-ppfrt\" (UID: \"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4\") " pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:03 crc kubenswrapper[4605]: I1001 14:01:03.946929 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0d99f04-732e-4ed6-8d80-f2f4eeb723e1" path="/var/lib/kubelet/pods/e0d99f04-732e-4ed6-8d80-f2f4eeb723e1/volumes"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.109700 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-ntlsl"]
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.120486 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ppfrt"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.167863 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-l45gl"]
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.169138 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-l45gl"
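The "Cleaned up orphaned pod volumes dir" record above removes the per-pod volumes directory under the kubelet root once a deleted pod's volumes are all unmounted and detached. A sketch of the path layout; the helper function is illustrative, not a kubelet API.

package main

import (
	"fmt"
	"path/filepath"
)

// Layout of the directory removed by the cleanup record above.
func podVolumesDir(podUID string) string {
	return filepath.Join("/var/lib/kubelet/pods", podUID, "volumes")
}

func main() {
	// UID taken from the record above.
	fmt.Println(podVolumesDir("e0d99f04-732e-4ed6-8d80-f2f4eeb723e1"))
}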
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.173634 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.196777 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-l45gl"]
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.337052 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrtc7\" (UniqueName: \"kubernetes.io/projected/b7288927-685b-405e-89a7-439e0f377750-kube-api-access-hrtc7\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.337244 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-dns-svc\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.337327 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.337488 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-config\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.337601 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.413074 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.439396 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.439781 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrtc7\" (UniqueName: \"kubernetes.io/projected/b7288927-685b-405e-89a7-439e0f377750-kube-api-access-hrtc7\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.439818 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-dns-svc\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.439850 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.439915 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-config\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.440847 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-config\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.442074 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.442605 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.442650 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-dns-svc\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.467862 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.468981 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrtc7\" (UniqueName: \"kubernetes.io/projected/b7288927-685b-405e-89a7-439e0f377750-kube-api-access-hrtc7\") pod \"dnsmasq-dns-698758b865-l45gl\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.498633 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.507955 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-ntlsl"]
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.661388 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ppfrt"]
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.969024 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-l45gl"]
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.989141 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ppfrt" event={"ID":"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4","Type":"ContainerStarted","Data":"a9501ab4a5a17f10d55d6dc3907f367c181749f2237ebe253601d239c919bf7a"}
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.989416 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ppfrt" event={"ID":"2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4","Type":"ContainerStarted","Data":"ffb1ef049f9c87f8637e269b5a3379bb023cab79e776ce453c2105c8f7d27618"}
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.992492 4605 generic.go:334] "Generic (PLEG): container finished" podID="184ee06f-8871-45cb-a77e-5ab6e740b756" containerID="645f3ce2b8990c046aa695e1d7a9f8dc049316b07dfeae4f022aa05a37e9ac2a" exitCode=0
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.992549 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl" event={"ID":"184ee06f-8871-45cb-a77e-5ab6e740b756","Type":"ContainerDied","Data":"645f3ce2b8990c046aa695e1d7a9f8dc049316b07dfeae4f022aa05a37e9ac2a"}
Oct 01 14:01:04 crc kubenswrapper[4605]: I1001 14:01:04.992576 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl" event={"ID":"184ee06f-8871-45cb-a77e-5ab6e740b756","Type":"ContainerStarted","Data":"377b760db69d93c121af5a69e7577622e3bf26770a0932794e0064eb0be69bc3"}
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.071705 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.292249 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.354281 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 01 14:01:05 crc kubenswrapper[4605]: E1001 14:01:05.354672 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184ee06f-8871-45cb-a77e-5ab6e740b756" containerName="init"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.354697 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="184ee06f-8871-45cb-a77e-5ab6e740b756" containerName="init"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.354921 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="184ee06f-8871-45cb-a77e-5ab6e740b756" containerName="init"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.355779 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
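The event={...} payloads in the SyncLoop (PLEG) records deserialize into a small triple: ID is the pod UID, Type the lifecycle transition (ContainerStarted/ContainerDied), and Data the container or sandbox ID. A sketch using the ContainerDied payload copied from the record above; the struct mirrors the log output, not kubelet's internal type.

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the event={...} payload printed in the SyncLoop (PLEG) records.
type plegEvent struct {
	ID   string `json:"ID"`   // pod UID
	Type string `json:"Type"` // e.g. ContainerStarted, ContainerDied
	Data string `json:"Data"` // container or sandbox ID
}

func main() {
	raw := `{"ID":"184ee06f-8871-45cb-a77e-5ab6e740b756","Type":"ContainerDied","Data":"645f3ce2b8990c046aa695e1d7a9f8dc049316b07dfeae4f022aa05a37e9ac2a"}`
	var e plegEvent
	if err := json.Unmarshal([]byte(raw), &e); err != nil {
		panic(err)
	}
	fmt.Printf("pod %s: %s %s\n", e.ID, e.Type, e.Data)
}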
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.356887 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-config\") pod \"184ee06f-8871-45cb-a77e-5ab6e740b756\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") "
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.356993 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-ovsdbserver-nb\") pod \"184ee06f-8871-45cb-a77e-5ab6e740b756\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") "
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.357052 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc\") pod \"184ee06f-8871-45cb-a77e-5ab6e740b756\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") "
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.357126 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbrfz\" (UniqueName: \"kubernetes.io/projected/184ee06f-8871-45cb-a77e-5ab6e740b756-kube-api-access-fbrfz\") pod \"184ee06f-8871-45cb-a77e-5ab6e740b756\" (UID: \"184ee06f-8871-45cb-a77e-5ab6e740b756\") "
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.361644 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.361986 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.362148 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.362295 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-w44h8"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.368567 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/184ee06f-8871-45cb-a77e-5ab6e740b756-kube-api-access-fbrfz" (OuterVolumeSpecName: "kube-api-access-fbrfz") pod "184ee06f-8871-45cb-a77e-5ab6e740b756" (UID: "184ee06f-8871-45cb-a77e-5ab6e740b756"). InnerVolumeSpecName "kube-api-access-fbrfz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.384806 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.404968 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "184ee06f-8871-45cb-a77e-5ab6e740b756" (UID: "184ee06f-8871-45cb-a77e-5ab6e740b756"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.418000 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-config" (OuterVolumeSpecName: "config") pod "184ee06f-8871-45cb-a77e-5ab6e740b756" (UID: "184ee06f-8871-45cb-a77e-5ab6e740b756"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.422123 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "184ee06f-8871-45cb-a77e-5ab6e740b756" (UID: "184ee06f-8871-45cb-a77e-5ab6e740b756"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458634 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458735 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6kts\" (UniqueName: \"kubernetes.io/projected/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-kube-api-access-g6kts\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458763 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458794 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458822 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-scripts\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458859 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-config\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458886 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0"
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458977 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458987 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc\") on node \"crc\" DevicePath \"\""
\"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.458999 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbrfz\" (UniqueName: \"kubernetes.io/projected/184ee06f-8871-45cb-a77e-5ab6e740b756-kube-api-access-fbrfz\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.459010 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/184ee06f-8871-45cb-a77e-5ab6e740b756-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560551 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560631 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6kts\" (UniqueName: \"kubernetes.io/projected/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-kube-api-access-g6kts\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560673 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560716 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560751 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-scripts\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560791 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-config\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.560828 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.561415 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.562827 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-scripts\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.563168 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-config\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.564970 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.565248 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.570764 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.617664 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6kts\" (UniqueName: \"kubernetes.io/projected/e2f8bf30-b59a-4564-b6d2-5f201b0fe957-kube-api-access-g6kts\") pod \"ovn-northd-0\" (UID: \"e2f8bf30-b59a-4564-b6d2-5f201b0fe957\") " pod="openstack/ovn-northd-0" Oct 01 14:01:05 crc kubenswrapper[4605]: I1001 14:01:05.677596 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.001343 4605 generic.go:334] "Generic (PLEG): container finished" podID="b7288927-685b-405e-89a7-439e0f377750" containerID="f1a7608914d7a5d4ccdb930db32904c77c0113e0fc1cfac288997fbc328d8715" exitCode=0
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.001591 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-l45gl" event={"ID":"b7288927-685b-405e-89a7-439e0f377750","Type":"ContainerDied","Data":"f1a7608914d7a5d4ccdb930db32904c77c0113e0fc1cfac288997fbc328d8715"}
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.001636 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-l45gl" event={"ID":"b7288927-685b-405e-89a7-439e0f377750","Type":"ContainerStarted","Data":"c8cc696bcd70110eea95c035b15222e9f75a53393f1e1a6d8938cb67f665c153"}
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.003246 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl" event={"ID":"184ee06f-8871-45cb-a77e-5ab6e740b756","Type":"ContainerDied","Data":"377b760db69d93c121af5a69e7577622e3bf26770a0932794e0064eb0be69bc3"}
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.003302 4605 scope.go:117] "RemoveContainer" containerID="645f3ce2b8990c046aa695e1d7a9f8dc049316b07dfeae4f022aa05a37e9ac2a"
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.003577 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-ntlsl"
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.078669 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ppfrt" podStartSLOduration=3.078654713 podStartE2EDuration="3.078654713s" podCreationTimestamp="2025-10-01 14:01:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:01:06.073888332 +0000 UTC m=+988.817864540" watchObservedRunningTime="2025-10-01 14:01:06.078654713 +0000 UTC m=+988.822630921"
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.136069 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-ntlsl"]
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.146382 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-ntlsl"]
Oct 01 14:01:06 crc kubenswrapper[4605]: I1001 14:01:06.308909 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 01 14:01:06 crc kubenswrapper[4605]: W1001 14:01:06.308974 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2f8bf30_b59a_4564_b6d2_5f201b0fe957.slice/crio-045571f6972c640f5d55596e1a34118a0d7915dbb505c56efe30ff28fdaa3477 WatchSource:0}: Error finding container 045571f6972c640f5d55596e1a34118a0d7915dbb505c56efe30ff28fdaa3477: Status 404 returned error can't find the container with id 045571f6972c640f5d55596e1a34118a0d7915dbb505c56efe30ff28fdaa3477
Oct 01 14:01:07 crc kubenswrapper[4605]: I1001 14:01:07.011135 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e2f8bf30-b59a-4564-b6d2-5f201b0fe957","Type":"ContainerStarted","Data":"045571f6972c640f5d55596e1a34118a0d7915dbb505c56efe30ff28fdaa3477"}
Oct 01 14:01:07 crc kubenswrapper[4605]: I1001 14:01:07.014203 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-l45gl" event={"ID":"b7288927-685b-405e-89a7-439e0f377750","Type":"ContainerStarted","Data":"cf102b919f69638ceae752f31f439b4cb5183e2e1dff48390a33c1e2c8a81d88"}
Oct 01 14:01:07 crc kubenswrapper[4605]: I1001 14:01:07.014364 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-l45gl"
Oct 01 14:01:07 crc kubenswrapper[4605]: I1001 14:01:07.038366 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-l45gl" podStartSLOduration=3.038343153 podStartE2EDuration="3.038343153s" podCreationTimestamp="2025-10-01 14:01:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:01:07.033325606 +0000 UTC m=+989.777301814" watchObservedRunningTime="2025-10-01 14:01:07.038343153 +0000 UTC m=+989.782319371"
Oct 01 14:01:07 crc kubenswrapper[4605]: I1001 14:01:07.936017 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="184ee06f-8871-45cb-a77e-5ab6e740b756" path="/var/lib/kubelet/pods/184ee06f-8871-45cb-a77e-5ab6e740b756/volumes"
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.122592 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0"
Oct 01 14:01:09 crc kubenswrapper[4605]: E1001 14:01:09.123787 4605 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 01 14:01:09 crc kubenswrapper[4605]: E1001 14:01:09.123815 4605 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 01 14:01:09 crc kubenswrapper[4605]: E1001 14:01:09.123868 4605 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift podName:fedbab19-fa82-4d92-b787-de85226cd34f nodeName:}" failed. No retries permitted until 2025-10-01 14:01:25.123850278 +0000 UTC m=+1007.867826586 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift") pod "swift-storage-0" (UID: "fedbab19-fa82-4d92-b787-de85226cd34f") : configmap "swift-ring-files" not found
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.715808 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-d6qqt"]
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.717179 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d6qqt"
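The retries above keep failing because the projected volume etc-swift sources a ConfigMap named swift-ring-files that does not exist yet; swift-storage-0 cannot start until it appears, presumably published by the swift-ring-rebalance job seen earlier, though the log does not state that linkage. A sketch of such a projected volume, assuming k8s.io/api types; the names come from the log, everything else is assumed.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// Shape of the failing volume: a projected volume sourcing the ConfigMap
// "swift-ring-files".
func etcSwift() corev1.Volume {
	return corev1.Volume{
		Name: "etc-swift",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{{
					ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "swift-ring-files"},
					},
				}},
			},
		},
	}
}

func main() {
	v := etcSwift()
	fmt.Println(v.Name, "sources configmap", v.VolumeSource.Projected.Sources[0].ConfigMap.Name)
}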
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.727456 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-d6qqt"]
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.833508 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2zrc\" (UniqueName: \"kubernetes.io/projected/0effdbf0-5be0-4076-b5a6-2b941e5d16e3-kube-api-access-c2zrc\") pod \"keystone-db-create-d6qqt\" (UID: \"0effdbf0-5be0-4076-b5a6-2b941e5d16e3\") " pod="openstack/keystone-db-create-d6qqt"
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.942422 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2zrc\" (UniqueName: \"kubernetes.io/projected/0effdbf0-5be0-4076-b5a6-2b941e5d16e3-kube-api-access-c2zrc\") pod \"keystone-db-create-d6qqt\" (UID: \"0effdbf0-5be0-4076-b5a6-2b941e5d16e3\") " pod="openstack/keystone-db-create-d6qqt"
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.956161 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-kfnp9"]
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.957115 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kfnp9"
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.964800 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2zrc\" (UniqueName: \"kubernetes.io/projected/0effdbf0-5be0-4076-b5a6-2b941e5d16e3-kube-api-access-c2zrc\") pod \"keystone-db-create-d6qqt\" (UID: \"0effdbf0-5be0-4076-b5a6-2b941e5d16e3\") " pod="openstack/keystone-db-create-d6qqt"
Oct 01 14:01:09 crc kubenswrapper[4605]: I1001 14:01:09.966137 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-kfnp9"]
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.032954 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d6qqt"
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.034773 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e2f8bf30-b59a-4564-b6d2-5f201b0fe957","Type":"ContainerStarted","Data":"66535664b442df371f0f10268280e8b7ab7521dce71f27031d14f8a4b1e12fda"}
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.034882 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e2f8bf30-b59a-4564-b6d2-5f201b0fe957","Type":"ContainerStarted","Data":"212ffcdfbbd85b0d02f541ce9ca1d22fb1f6b37c756a0a2199414843d86e0be3"}
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.035076 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.057588 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.188290808 podStartE2EDuration="5.057565869s" podCreationTimestamp="2025-10-01 14:01:05 +0000 UTC" firstStartedPulling="2025-10-01 14:01:06.313531722 +0000 UTC m=+989.057507930" lastFinishedPulling="2025-10-01 14:01:09.182806773 +0000 UTC m=+991.926782991" observedRunningTime="2025-10-01 14:01:10.055133517 +0000 UTC m=+992.799109715" watchObservedRunningTime="2025-10-01 14:01:10.057565869 +0000 UTC m=+992.801542077"
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.145850 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhtw5\" (UniqueName: \"kubernetes.io/projected/77127160-38c8-47a8-920b-90a7b2cd8e3f-kube-api-access-vhtw5\") pod \"placement-db-create-kfnp9\" (UID: \"77127160-38c8-47a8-920b-90a7b2cd8e3f\") " pod="openstack/placement-db-create-kfnp9"
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.249619 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhtw5\" (UniqueName: \"kubernetes.io/projected/77127160-38c8-47a8-920b-90a7b2cd8e3f-kube-api-access-vhtw5\") pod \"placement-db-create-kfnp9\" (UID: \"77127160-38c8-47a8-920b-90a7b2cd8e3f\") " pod="openstack/placement-db-create-kfnp9"
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.288946 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-d6qqt"]
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.302432 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhtw5\" (UniqueName: \"kubernetes.io/projected/77127160-38c8-47a8-920b-90a7b2cd8e3f-kube-api-access-vhtw5\") pod \"placement-db-create-kfnp9\" (UID: \"77127160-38c8-47a8-920b-90a7b2cd8e3f\") " pod="openstack/placement-db-create-kfnp9"
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.318882 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-tfbpz"]
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.339190 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tfbpz"]
Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.339278 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tfbpz"
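The keystone/placement/glance db-create pods above are one-shot workloads: each mounts only its service-account token, runs a single container to exitCode=0, and is torn down. A minimal Job-shaped sketch of that pattern, assuming k8s.io/api types; the image is a placeholder and the Job spec itself is an assumption, since the log only shows the resulting pods.

package main

import (
	"fmt"

	batchv1 "k8s.io/api/batch/v1"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// One-shot database-creation workload of the kind that produces pods like
// keystone-db-create-d6qqt: run a single container to completion, never
// restart.
func dbCreateJob(service string) *batchv1.Job {
	return &batchv1.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: service + "-db-create-",
			Namespace:    "openstack",
		},
		Spec: batchv1.JobSpec{
			Template: corev1.PodTemplateSpec{
				Spec: corev1.PodSpec{
					RestartPolicy: corev1.RestartPolicyNever,
					Containers: []corev1.Container{
						{Name: service + "-db-create", Image: "example.invalid/db-create:latest"},
					},
				},
			},
		},
	}
}

func main() {
	fmt.Println(dbCreateJob("keystone").ObjectMeta.GenerateName)
}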
Need to start a new one" pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.357718 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snkzh\" (UniqueName: \"kubernetes.io/projected/202abc2b-e141-498c-9198-489fbc0e5130-kube-api-access-snkzh\") pod \"glance-db-create-tfbpz\" (UID: \"202abc2b-e141-498c-9198-489fbc0e5130\") " pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.459226 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snkzh\" (UniqueName: \"kubernetes.io/projected/202abc2b-e141-498c-9198-489fbc0e5130-kube-api-access-snkzh\") pod \"glance-db-create-tfbpz\" (UID: \"202abc2b-e141-498c-9198-489fbc0e5130\") " pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.481009 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snkzh\" (UniqueName: \"kubernetes.io/projected/202abc2b-e141-498c-9198-489fbc0e5130-kube-api-access-snkzh\") pod \"glance-db-create-tfbpz\" (UID: \"202abc2b-e141-498c-9198-489fbc0e5130\") " pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.573004 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kfnp9" Oct 01 14:01:10 crc kubenswrapper[4605]: I1001 14:01:10.673623 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:11 crc kubenswrapper[4605]: I1001 14:01:11.040434 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-kfnp9"] Oct 01 14:01:11 crc kubenswrapper[4605]: W1001 14:01:11.043406 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77127160_38c8_47a8_920b_90a7b2cd8e3f.slice/crio-a08b2bff7b95904780a817bb0381dc9193b6024687789e484b2a4c1acb0fd824 WatchSource:0}: Error finding container a08b2bff7b95904780a817bb0381dc9193b6024687789e484b2a4c1acb0fd824: Status 404 returned error can't find the container with id a08b2bff7b95904780a817bb0381dc9193b6024687789e484b2a4c1acb0fd824 Oct 01 14:01:11 crc kubenswrapper[4605]: I1001 14:01:11.045125 4605 generic.go:334] "Generic (PLEG): container finished" podID="0effdbf0-5be0-4076-b5a6-2b941e5d16e3" containerID="7f928d31bbf0d797d8853b8870397f4392e5f69e5fb826772107d0bf7c886718" exitCode=0 Oct 01 14:01:11 crc kubenswrapper[4605]: I1001 14:01:11.045939 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d6qqt" event={"ID":"0effdbf0-5be0-4076-b5a6-2b941e5d16e3","Type":"ContainerDied","Data":"7f928d31bbf0d797d8853b8870397f4392e5f69e5fb826772107d0bf7c886718"} Oct 01 14:01:11 crc kubenswrapper[4605]: I1001 14:01:11.045964 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d6qqt" event={"ID":"0effdbf0-5be0-4076-b5a6-2b941e5d16e3","Type":"ContainerStarted","Data":"9e66892f8f25dbf4effbfa2237c1341588592b58f546bc34511efd3dcb5b5271"} Oct 01 14:01:11 crc kubenswrapper[4605]: I1001 14:01:11.159700 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tfbpz"] Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.063008 4605 generic.go:334] "Generic (PLEG): container finished" podID="202abc2b-e141-498c-9198-489fbc0e5130" 
containerID="ef32932c77e1999fec090f0e565c0665639fe0043a0a3edd6b5a48c22458bedc" exitCode=0 Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.063082 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tfbpz" event={"ID":"202abc2b-e141-498c-9198-489fbc0e5130","Type":"ContainerDied","Data":"ef32932c77e1999fec090f0e565c0665639fe0043a0a3edd6b5a48c22458bedc"} Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.063125 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tfbpz" event={"ID":"202abc2b-e141-498c-9198-489fbc0e5130","Type":"ContainerStarted","Data":"9e419156d786aa3bf14b4e51597767b04109a728ec2892e03e1cfb54af663d7f"} Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.064699 4605 generic.go:334] "Generic (PLEG): container finished" podID="4c3df3b9-829b-4ebb-9593-487b1f6ddce1" containerID="10262fe6c5244d7199a82329e4f9003b4be448295e97eee8fea9a9f656c66597" exitCode=0 Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.064770 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-x8zcm" event={"ID":"4c3df3b9-829b-4ebb-9593-487b1f6ddce1","Type":"ContainerDied","Data":"10262fe6c5244d7199a82329e4f9003b4be448295e97eee8fea9a9f656c66597"} Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.066271 4605 generic.go:334] "Generic (PLEG): container finished" podID="77127160-38c8-47a8-920b-90a7b2cd8e3f" containerID="701bc72e5736e51beddc7d0057d6a8028da318154b89eb18c8c171f84e698d09" exitCode=0 Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.066469 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-kfnp9" event={"ID":"77127160-38c8-47a8-920b-90a7b2cd8e3f","Type":"ContainerDied","Data":"701bc72e5736e51beddc7d0057d6a8028da318154b89eb18c8c171f84e698d09"} Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.066487 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-kfnp9" event={"ID":"77127160-38c8-47a8-920b-90a7b2cd8e3f","Type":"ContainerStarted","Data":"a08b2bff7b95904780a817bb0381dc9193b6024687789e484b2a4c1acb0fd824"} Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.389800 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d6qqt" Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.494138 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2zrc\" (UniqueName: \"kubernetes.io/projected/0effdbf0-5be0-4076-b5a6-2b941e5d16e3-kube-api-access-c2zrc\") pod \"0effdbf0-5be0-4076-b5a6-2b941e5d16e3\" (UID: \"0effdbf0-5be0-4076-b5a6-2b941e5d16e3\") " Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.499626 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0effdbf0-5be0-4076-b5a6-2b941e5d16e3-kube-api-access-c2zrc" (OuterVolumeSpecName: "kube-api-access-c2zrc") pod "0effdbf0-5be0-4076-b5a6-2b941e5d16e3" (UID: "0effdbf0-5be0-4076-b5a6-2b941e5d16e3"). InnerVolumeSpecName "kube-api-access-c2zrc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:12 crc kubenswrapper[4605]: I1001 14:01:12.596520 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2zrc\" (UniqueName: \"kubernetes.io/projected/0effdbf0-5be0-4076-b5a6-2b941e5d16e3-kube-api-access-c2zrc\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.075818 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d6qqt" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.076404 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d6qqt" event={"ID":"0effdbf0-5be0-4076-b5a6-2b941e5d16e3","Type":"ContainerDied","Data":"9e66892f8f25dbf4effbfa2237c1341588592b58f546bc34511efd3dcb5b5271"} Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.076475 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e66892f8f25dbf4effbfa2237c1341588592b58f546bc34511efd3dcb5b5271" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.414002 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kfnp9" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.523040 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhtw5\" (UniqueName: \"kubernetes.io/projected/77127160-38c8-47a8-920b-90a7b2cd8e3f-kube-api-access-vhtw5\") pod \"77127160-38c8-47a8-920b-90a7b2cd8e3f\" (UID: \"77127160-38c8-47a8-920b-90a7b2cd8e3f\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.528179 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77127160-38c8-47a8-920b-90a7b2cd8e3f-kube-api-access-vhtw5" (OuterVolumeSpecName: "kube-api-access-vhtw5") pod "77127160-38c8-47a8-920b-90a7b2cd8e3f" (UID: "77127160-38c8-47a8-920b-90a7b2cd8e3f"). InnerVolumeSpecName "kube-api-access-vhtw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.592547 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.597611 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624038 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-scripts\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624106 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kktx\" (UniqueName: \"kubernetes.io/projected/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-kube-api-access-7kktx\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624153 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-combined-ca-bundle\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624210 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-dispersionconf\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624325 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-swiftconf\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624354 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-etc-swift\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624390 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-ring-data-devices\") pod \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\" (UID: \"4c3df3b9-829b-4ebb-9593-487b1f6ddce1\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624417 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snkzh\" (UniqueName: \"kubernetes.io/projected/202abc2b-e141-498c-9198-489fbc0e5130-kube-api-access-snkzh\") pod \"202abc2b-e141-498c-9198-489fbc0e5130\" (UID: \"202abc2b-e141-498c-9198-489fbc0e5130\") " Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.624719 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhtw5\" (UniqueName: \"kubernetes.io/projected/77127160-38c8-47a8-920b-90a7b2cd8e3f-kube-api-access-vhtw5\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.625404 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.627654 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.628844 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-kube-api-access-7kktx" (OuterVolumeSpecName: "kube-api-access-7kktx") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "kube-api-access-7kktx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.628979 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/202abc2b-e141-498c-9198-489fbc0e5130-kube-api-access-snkzh" (OuterVolumeSpecName: "kube-api-access-snkzh") pod "202abc2b-e141-498c-9198-489fbc0e5130" (UID: "202abc2b-e141-498c-9198-489fbc0e5130"). InnerVolumeSpecName "kube-api-access-snkzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.630551 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.650087 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.655153 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.660975 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-scripts" (OuterVolumeSpecName: "scripts") pod "4c3df3b9-829b-4ebb-9593-487b1f6ddce1" (UID: "4c3df3b9-829b-4ebb-9593-487b1f6ddce1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.725997 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726043 4605 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726055 4605 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726065 4605 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726077 4605 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726086 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snkzh\" (UniqueName: \"kubernetes.io/projected/202abc2b-e141-498c-9198-489fbc0e5130-kube-api-access-snkzh\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726111 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:13 crc kubenswrapper[4605]: I1001 14:01:13.726120 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kktx\" (UniqueName: \"kubernetes.io/projected/4c3df3b9-829b-4ebb-9593-487b1f6ddce1-kube-api-access-7kktx\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.084178 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-x8zcm" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.084217 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-x8zcm" event={"ID":"4c3df3b9-829b-4ebb-9593-487b1f6ddce1","Type":"ContainerDied","Data":"97ec14ed044d1342826efacfa09091d27a4c4b59c353f8f30449b02077c962bd"} Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.084266 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97ec14ed044d1342826efacfa09091d27a4c4b59c353f8f30449b02077c962bd" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.086492 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-kfnp9" event={"ID":"77127160-38c8-47a8-920b-90a7b2cd8e3f","Type":"ContainerDied","Data":"a08b2bff7b95904780a817bb0381dc9193b6024687789e484b2a4c1acb0fd824"} Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.086546 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a08b2bff7b95904780a817bb0381dc9193b6024687789e484b2a4c1acb0fd824" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.086510 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-kfnp9" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.088131 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tfbpz" event={"ID":"202abc2b-e141-498c-9198-489fbc0e5130","Type":"ContainerDied","Data":"9e419156d786aa3bf14b4e51597767b04109a728ec2892e03e1cfb54af663d7f"} Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.088247 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e419156d786aa3bf14b4e51597767b04109a728ec2892e03e1cfb54af663d7f" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.088209 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tfbpz" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.500873 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-l45gl" Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.570031 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-2vm8x"] Oct 01 14:01:14 crc kubenswrapper[4605]: I1001 14:01:14.570308 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" podUID="479f5510-ba1f-46e2-86d3-39e660975097" containerName="dnsmasq-dns" containerID="cri-o://1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530" gracePeriod=10 Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.017562 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.052504 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-config\") pod \"479f5510-ba1f-46e2-86d3-39e660975097\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.052598 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-dns-svc\") pod \"479f5510-ba1f-46e2-86d3-39e660975097\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.052733 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqcf2\" (UniqueName: \"kubernetes.io/projected/479f5510-ba1f-46e2-86d3-39e660975097-kube-api-access-lqcf2\") pod \"479f5510-ba1f-46e2-86d3-39e660975097\" (UID: \"479f5510-ba1f-46e2-86d3-39e660975097\") " Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.063395 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/479f5510-ba1f-46e2-86d3-39e660975097-kube-api-access-lqcf2" (OuterVolumeSpecName: "kube-api-access-lqcf2") pod "479f5510-ba1f-46e2-86d3-39e660975097" (UID: "479f5510-ba1f-46e2-86d3-39e660975097"). InnerVolumeSpecName "kube-api-access-lqcf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.101490 4605 generic.go:334] "Generic (PLEG): container finished" podID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerID="20882a754651de043ae04d6134122f8552e8704cf3c71bf28f4e3b8a9f4daab2" exitCode=0 Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.101566 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d753cd5e-e85d-424c-a439-2b51cbedf76f","Type":"ContainerDied","Data":"20882a754651de043ae04d6134122f8552e8704cf3c71bf28f4e3b8a9f4daab2"} Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.106599 4605 generic.go:334] "Generic (PLEG): container finished" podID="479f5510-ba1f-46e2-86d3-39e660975097" containerID="1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530" exitCode=0 Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.106635 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" event={"ID":"479f5510-ba1f-46e2-86d3-39e660975097","Type":"ContainerDied","Data":"1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530"} Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.106698 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.106797 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-2vm8x" event={"ID":"479f5510-ba1f-46e2-86d3-39e660975097","Type":"ContainerDied","Data":"ae4ab55c3f603588f5520a248da8c68d208205ff83f92f937741ecb151262957"} Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.106820 4605 scope.go:117] "RemoveContainer" containerID="1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.115987 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "479f5510-ba1f-46e2-86d3-39e660975097" (UID: "479f5510-ba1f-46e2-86d3-39e660975097"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.143085 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-config" (OuterVolumeSpecName: "config") pod "479f5510-ba1f-46e2-86d3-39e660975097" (UID: "479f5510-ba1f-46e2-86d3-39e660975097"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.147948 4605 scope.go:117] "RemoveContainer" containerID="0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.155455 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqcf2\" (UniqueName: \"kubernetes.io/projected/479f5510-ba1f-46e2-86d3-39e660975097-kube-api-access-lqcf2\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.155481 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.155490 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/479f5510-ba1f-46e2-86d3-39e660975097-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.177940 4605 scope.go:117] "RemoveContainer" containerID="1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530" Oct 01 14:01:15 crc kubenswrapper[4605]: E1001 14:01:15.178390 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530\": container with ID starting with 1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530 not found: ID does not exist" containerID="1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530" Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.178431 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530"} err="failed to get container status \"1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530\": rpc error: code = NotFound desc = could not find container \"1d431248fbb83db87abccccce088031e5d2af7647a73a31c69c534cfe5370530\": container with ID starting with 
Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.178455 4605 scope.go:117] "RemoveContainer" containerID="0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4"
Oct 01 14:01:15 crc kubenswrapper[4605]: E1001 14:01:15.178939 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4\": container with ID starting with 0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4 not found: ID does not exist" containerID="0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4"
Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.178963 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4"} err="failed to get container status \"0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4\": rpc error: code = NotFound desc = could not find container \"0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4\": container with ID starting with 0eedaf45e1e8614a5094bd9cbc29f6643f7ea32b08db9c813d9a282b31958bb4 not found: ID does not exist"
Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.464569 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-2vm8x"]
Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.470476 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-2vm8x"]
Oct 01 14:01:15 crc kubenswrapper[4605]: I1001 14:01:15.935321 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="479f5510-ba1f-46e2-86d3-39e660975097" path="/var/lib/kubelet/pods/479f5510-ba1f-46e2-86d3-39e660975097/volumes"
Oct 01 14:01:16 crc kubenswrapper[4605]: I1001 14:01:16.118374 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d753cd5e-e85d-424c-a439-2b51cbedf76f","Type":"ContainerStarted","Data":"a842dfd7b6d3c07d77db3601730bb1df84f7335badfbfd2b2c458020725e1c9a"}
Oct 01 14:01:16 crc kubenswrapper[4605]: I1001 14:01:16.119824 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 01 14:01:16 crc kubenswrapper[4605]: I1001 14:01:16.121149 4605 generic.go:334] "Generic (PLEG): container finished" podID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerID="53e43957de2357c15334e32f90fbcc4bc89714062d29d00f8c8a8718dccf0c53" exitCode=0
Oct 01 14:01:16 crc kubenswrapper[4605]: I1001 14:01:16.121212 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"18fdf98a-ad5b-4930-b8cc-2422242aac16","Type":"ContainerDied","Data":"53e43957de2357c15334e32f90fbcc4bc89714062d29d00f8c8a8718dccf0c53"}
Oct 01 14:01:16 crc kubenswrapper[4605]: I1001 14:01:16.192152 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371974.662642 podStartE2EDuration="1m2.192135079s" podCreationTimestamp="2025-10-01 14:00:14 +0000 UTC" firstStartedPulling="2025-10-01 14:00:16.932181484 +0000 UTC m=+939.676157692" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:01:16.148368188 +0000 UTC m=+998.892344396" watchObservedRunningTime="2025-10-01 14:01:16.192135079 +0000 UTC m=+998.936111277"
Oct 01 14:01:17 crc kubenswrapper[4605]: I1001 14:01:17.133858 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"18fdf98a-ad5b-4930-b8cc-2422242aac16","Type":"ContainerStarted","Data":"b63b48430f83f00e87cf9e6c01f9f12ea65be589b73cbf994253d1f72ebb0ee4"}
Oct 01 14:01:17 crc kubenswrapper[4605]: I1001 14:01:17.134466 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 01 14:01:17 crc kubenswrapper[4605]: I1001 14:01:17.158420 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.317824501 podStartE2EDuration="1m2.158404726s" podCreationTimestamp="2025-10-01 14:00:15 +0000 UTC" firstStartedPulling="2025-10-01 14:00:17.243082777 +0000 UTC m=+939.987058985" lastFinishedPulling="2025-10-01 14:00:40.083663002 +0000 UTC m=+962.827639210" observedRunningTime="2025-10-01 14:01:17.152396274 +0000 UTC m=+999.896372482" watchObservedRunningTime="2025-10-01 14:01:17.158404726 +0000 UTC m=+999.902380934"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.815860 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-10d2-account-create-tlgkg"]
Oct 01 14:01:19 crc kubenswrapper[4605]: E1001 14:01:19.816586 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="202abc2b-e141-498c-9198-489fbc0e5130" containerName="mariadb-database-create"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816603 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="202abc2b-e141-498c-9198-489fbc0e5130" containerName="mariadb-database-create"
Oct 01 14:01:19 crc kubenswrapper[4605]: E1001 14:01:19.816615 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c3df3b9-829b-4ebb-9593-487b1f6ddce1" containerName="swift-ring-rebalance"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816623 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c3df3b9-829b-4ebb-9593-487b1f6ddce1" containerName="swift-ring-rebalance"
Oct 01 14:01:19 crc kubenswrapper[4605]: E1001 14:01:19.816637 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="479f5510-ba1f-46e2-86d3-39e660975097" containerName="dnsmasq-dns"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816645 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="479f5510-ba1f-46e2-86d3-39e660975097" containerName="dnsmasq-dns"
Oct 01 14:01:19 crc kubenswrapper[4605]: E1001 14:01:19.816655 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0effdbf0-5be0-4076-b5a6-2b941e5d16e3" containerName="mariadb-database-create"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816662 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0effdbf0-5be0-4076-b5a6-2b941e5d16e3" containerName="mariadb-database-create"
Oct 01 14:01:19 crc kubenswrapper[4605]: E1001 14:01:19.816675 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="479f5510-ba1f-46e2-86d3-39e660975097" containerName="init"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816682 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="479f5510-ba1f-46e2-86d3-39e660975097" containerName="init"
Oct 01 14:01:19 crc kubenswrapper[4605]: E1001 14:01:19.816692 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77127160-38c8-47a8-920b-90a7b2cd8e3f" containerName="mariadb-database-create"
Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816699 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="77127160-38c8-47a8-920b-90a7b2cd8e3f" containerName="mariadb-database-create"
"Deleted CPUSet assignment" podUID="77127160-38c8-47a8-920b-90a7b2cd8e3f" containerName="mariadb-database-create" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816894 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="479f5510-ba1f-46e2-86d3-39e660975097" containerName="dnsmasq-dns" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816921 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0effdbf0-5be0-4076-b5a6-2b941e5d16e3" containerName="mariadb-database-create" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816935 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="202abc2b-e141-498c-9198-489fbc0e5130" containerName="mariadb-database-create" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.816948 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="77127160-38c8-47a8-920b-90a7b2cd8e3f" containerName="mariadb-database-create" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.817046 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c3df3b9-829b-4ebb-9593-487b1f6ddce1" containerName="swift-ring-rebalance" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.817790 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.821111 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.837544 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-10d2-account-create-tlgkg"] Oct 01 14:01:19 crc kubenswrapper[4605]: I1001 14:01:19.930066 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7m9c\" (UniqueName: \"kubernetes.io/projected/830104b9-e272-4098-ac03-f03681f72078-kube-api-access-m7m9c\") pod \"keystone-10d2-account-create-tlgkg\" (UID: \"830104b9-e272-4098-ac03-f03681f72078\") " pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.006460 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-a1df-account-create-tt6sl"] Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.007464 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.020115 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a1df-account-create-tt6sl"] Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.020385 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.031126 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7m9c\" (UniqueName: \"kubernetes.io/projected/830104b9-e272-4098-ac03-f03681f72078-kube-api-access-m7m9c\") pod \"keystone-10d2-account-create-tlgkg\" (UID: \"830104b9-e272-4098-ac03-f03681f72078\") " pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.058778 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7m9c\" (UniqueName: \"kubernetes.io/projected/830104b9-e272-4098-ac03-f03681f72078-kube-api-access-m7m9c\") pod \"keystone-10d2-account-create-tlgkg\" (UID: \"830104b9-e272-4098-ac03-f03681f72078\") " pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.133196 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89fxz\" (UniqueName: \"kubernetes.io/projected/1a17c5ad-ae46-4ac5-8195-b33580e0e77d-kube-api-access-89fxz\") pod \"placement-a1df-account-create-tt6sl\" (UID: \"1a17c5ad-ae46-4ac5-8195-b33580e0e77d\") " pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.150048 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.235139 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89fxz\" (UniqueName: \"kubernetes.io/projected/1a17c5ad-ae46-4ac5-8195-b33580e0e77d-kube-api-access-89fxz\") pod \"placement-a1df-account-create-tt6sl\" (UID: \"1a17c5ad-ae46-4ac5-8195-b33580e0e77d\") " pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.255807 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89fxz\" (UniqueName: \"kubernetes.io/projected/1a17c5ad-ae46-4ac5-8195-b33580e0e77d-kube-api-access-89fxz\") pod \"placement-a1df-account-create-tt6sl\" (UID: \"1a17c5ad-ae46-4ac5-8195-b33580e0e77d\") " pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.332912 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.596390 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-3f5e-account-create-r2dbh"] Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.597640 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.601675 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.653791 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-3f5e-account-create-r2dbh"] Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.721622 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-10d2-account-create-tlgkg"] Oct 01 14:01:20 crc kubenswrapper[4605]: W1001 14:01:20.724465 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod830104b9_e272_4098_ac03_f03681f72078.slice/crio-c9c2a3814c0e52613b9ff8e893c86bfcf0f9af752451ac9c0444b33ddc8ba88a WatchSource:0}: Error finding container c9c2a3814c0e52613b9ff8e893c86bfcf0f9af752451ac9c0444b33ddc8ba88a: Status 404 returned error can't find the container with id c9c2a3814c0e52613b9ff8e893c86bfcf0f9af752451ac9c0444b33ddc8ba88a Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.744854 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhcg9\" (UniqueName: \"kubernetes.io/projected/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813-kube-api-access-dhcg9\") pod \"glance-3f5e-account-create-r2dbh\" (UID: \"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813\") " pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.748486 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.846294 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhcg9\" (UniqueName: \"kubernetes.io/projected/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813-kube-api-access-dhcg9\") pod \"glance-3f5e-account-create-r2dbh\" (UID: \"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813\") " pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.871549 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhcg9\" (UniqueName: \"kubernetes.io/projected/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813-kube-api-access-dhcg9\") pod \"glance-3f5e-account-create-r2dbh\" (UID: \"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813\") " pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.911376 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a1df-account-create-tt6sl"] Oct 01 14:01:20 crc kubenswrapper[4605]: I1001 14:01:20.916702 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:21 crc kubenswrapper[4605]: I1001 14:01:21.184236 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a1df-account-create-tt6sl" event={"ID":"1a17c5ad-ae46-4ac5-8195-b33580e0e77d","Type":"ContainerStarted","Data":"94ddb15d37152d8db72fdfaf956d80589769f69b4a3ece0ae297026254708481"} Oct 01 14:01:21 crc kubenswrapper[4605]: I1001 14:01:21.192414 4605 generic.go:334] "Generic (PLEG): container finished" podID="830104b9-e272-4098-ac03-f03681f72078" containerID="87fc9f8e8849a911550de077eee0ac95c773779ad40f1c5d99475dbd37847e2d" exitCode=0 Oct 01 14:01:21 crc kubenswrapper[4605]: I1001 14:01:21.192466 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-10d2-account-create-tlgkg" event={"ID":"830104b9-e272-4098-ac03-f03681f72078","Type":"ContainerDied","Data":"87fc9f8e8849a911550de077eee0ac95c773779ad40f1c5d99475dbd37847e2d"} Oct 01 14:01:21 crc kubenswrapper[4605]: I1001 14:01:21.192502 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-10d2-account-create-tlgkg" event={"ID":"830104b9-e272-4098-ac03-f03681f72078","Type":"ContainerStarted","Data":"c9c2a3814c0e52613b9ff8e893c86bfcf0f9af752451ac9c0444b33ddc8ba88a"} Oct 01 14:01:21 crc kubenswrapper[4605]: I1001 14:01:21.445574 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-3f5e-account-create-r2dbh"] Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.200192 4605 generic.go:334] "Generic (PLEG): container finished" podID="1a17c5ad-ae46-4ac5-8195-b33580e0e77d" containerID="0bd576d0f0ca5a7e0ee3cd08c3d585797589b9efdd5683773f1ffd21c2de7067" exitCode=0 Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.200238 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a1df-account-create-tt6sl" event={"ID":"1a17c5ad-ae46-4ac5-8195-b33580e0e77d","Type":"ContainerDied","Data":"0bd576d0f0ca5a7e0ee3cd08c3d585797589b9efdd5683773f1ffd21c2de7067"} Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.203254 4605 generic.go:334] "Generic (PLEG): container finished" podID="9e3467e0-d2bc-48eb-a15a-16a3ebd1d813" containerID="af7b763235d9d5ec306603244eefa291e4fc408065b9a8a45171dbddb62cef69" exitCode=0 Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.203326 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3f5e-account-create-r2dbh" event={"ID":"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813","Type":"ContainerDied","Data":"af7b763235d9d5ec306603244eefa291e4fc408065b9a8a45171dbddb62cef69"} Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.203353 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3f5e-account-create-r2dbh" event={"ID":"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813","Type":"ContainerStarted","Data":"8ade6be627dd941944a15879e3a5dd968f6195d80a89601a507666fa94e821ad"} Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.619277 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.682683 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7m9c\" (UniqueName: \"kubernetes.io/projected/830104b9-e272-4098-ac03-f03681f72078-kube-api-access-m7m9c\") pod \"830104b9-e272-4098-ac03-f03681f72078\" (UID: \"830104b9-e272-4098-ac03-f03681f72078\") " Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.695126 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/830104b9-e272-4098-ac03-f03681f72078-kube-api-access-m7m9c" (OuterVolumeSpecName: "kube-api-access-m7m9c") pod "830104b9-e272-4098-ac03-f03681f72078" (UID: "830104b9-e272-4098-ac03-f03681f72078"). InnerVolumeSpecName "kube-api-access-m7m9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:22 crc kubenswrapper[4605]: I1001 14:01:22.784656 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7m9c\" (UniqueName: \"kubernetes.io/projected/830104b9-e272-4098-ac03-f03681f72078-kube-api-access-m7m9c\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.210260 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-10d2-account-create-tlgkg" event={"ID":"830104b9-e272-4098-ac03-f03681f72078","Type":"ContainerDied","Data":"c9c2a3814c0e52613b9ff8e893c86bfcf0f9af752451ac9c0444b33ddc8ba88a"} Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.211586 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9c2a3814c0e52613b9ff8e893c86bfcf0f9af752451ac9c0444b33ddc8ba88a" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.210281 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-10d2-account-create-tlgkg" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.640610 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.646421 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.699225 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89fxz\" (UniqueName: \"kubernetes.io/projected/1a17c5ad-ae46-4ac5-8195-b33580e0e77d-kube-api-access-89fxz\") pod \"1a17c5ad-ae46-4ac5-8195-b33580e0e77d\" (UID: \"1a17c5ad-ae46-4ac5-8195-b33580e0e77d\") " Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.729619 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a17c5ad-ae46-4ac5-8195-b33580e0e77d-kube-api-access-89fxz" (OuterVolumeSpecName: "kube-api-access-89fxz") pod "1a17c5ad-ae46-4ac5-8195-b33580e0e77d" (UID: "1a17c5ad-ae46-4ac5-8195-b33580e0e77d"). InnerVolumeSpecName "kube-api-access-89fxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.800812 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhcg9\" (UniqueName: \"kubernetes.io/projected/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813-kube-api-access-dhcg9\") pod \"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813\" (UID: \"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813\") " Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.801197 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89fxz\" (UniqueName: \"kubernetes.io/projected/1a17c5ad-ae46-4ac5-8195-b33580e0e77d-kube-api-access-89fxz\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.803761 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813-kube-api-access-dhcg9" (OuterVolumeSpecName: "kube-api-access-dhcg9") pod "9e3467e0-d2bc-48eb-a15a-16a3ebd1d813" (UID: "9e3467e0-d2bc-48eb-a15a-16a3ebd1d813"). InnerVolumeSpecName "kube-api-access-dhcg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:23 crc kubenswrapper[4605]: I1001 14:01:23.902570 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhcg9\" (UniqueName: \"kubernetes.io/projected/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813-kube-api-access-dhcg9\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:24 crc kubenswrapper[4605]: I1001 14:01:24.220350 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3f5e-account-create-r2dbh" Oct 01 14:01:24 crc kubenswrapper[4605]: I1001 14:01:24.220374 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3f5e-account-create-r2dbh" event={"ID":"9e3467e0-d2bc-48eb-a15a-16a3ebd1d813","Type":"ContainerDied","Data":"8ade6be627dd941944a15879e3a5dd968f6195d80a89601a507666fa94e821ad"} Oct 01 14:01:24 crc kubenswrapper[4605]: I1001 14:01:24.220429 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ade6be627dd941944a15879e3a5dd968f6195d80a89601a507666fa94e821ad" Oct 01 14:01:24 crc kubenswrapper[4605]: I1001 14:01:24.222614 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a1df-account-create-tt6sl" event={"ID":"1a17c5ad-ae46-4ac5-8195-b33580e0e77d","Type":"ContainerDied","Data":"94ddb15d37152d8db72fdfaf956d80589769f69b4a3ece0ae297026254708481"} Oct 01 14:01:24 crc kubenswrapper[4605]: I1001 14:01:24.222645 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a1df-account-create-tt6sl" Oct 01 14:01:24 crc kubenswrapper[4605]: I1001 14:01:24.222676 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94ddb15d37152d8db72fdfaf956d80589769f69b4a3ece0ae297026254708481" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.222653 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.234926 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fedbab19-fa82-4d92-b787-de85226cd34f-etc-swift\") pod \"swift-storage-0\" (UID: \"fedbab19-fa82-4d92-b787-de85226cd34f\") " pod="openstack/swift-storage-0" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.348821 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.692384 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-l69r5"] Oct 01 14:01:25 crc kubenswrapper[4605]: E1001 14:01:25.693068 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830104b9-e272-4098-ac03-f03681f72078" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.693080 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="830104b9-e272-4098-ac03-f03681f72078" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: E1001 14:01:25.693122 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a17c5ad-ae46-4ac5-8195-b33580e0e77d" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.693128 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a17c5ad-ae46-4ac5-8195-b33580e0e77d" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: E1001 14:01:25.693149 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3467e0-d2bc-48eb-a15a-16a3ebd1d813" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.693156 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3467e0-d2bc-48eb-a15a-16a3ebd1d813" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.696052 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="830104b9-e272-4098-ac03-f03681f72078" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.696111 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a17c5ad-ae46-4ac5-8195-b33580e0e77d" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.696127 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3467e0-d2bc-48eb-a15a-16a3ebd1d813" containerName="mariadb-account-create" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.696737 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.702280 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-nbts5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.702453 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.722763 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-l69r5"] Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.882630 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-combined-ca-bundle\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.882675 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xlqt\" (UniqueName: \"kubernetes.io/projected/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-kube-api-access-4xlqt\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.882707 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-config-data\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.882835 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-db-sync-config-data\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.946308 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jvb44" podUID="a37367ae-0e7b-4ad1-afb4-c48ca6282706" containerName="ovn-controller" probeResult="failure" output=< Oct 01 14:01:25 crc kubenswrapper[4605]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 01 14:01:25 crc kubenswrapper[4605]: > Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.984188 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-db-sync-config-data\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.984272 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-combined-ca-bundle\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.984294 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xlqt\" (UniqueName: 
\"kubernetes.io/projected/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-kube-api-access-4xlqt\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.984337 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-config-data\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.989263 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-combined-ca-bundle\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:25 crc kubenswrapper[4605]: I1001 14:01:25.992742 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-config-data\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.004570 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-db-sync-config-data\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.005838 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xlqt\" (UniqueName: \"kubernetes.io/projected/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-kube-api-access-4xlqt\") pod \"glance-db-sync-l69r5\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.050924 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-l69r5" Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.146670 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.242586 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.244534 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"d79e14e13d79541f01d7ff2151b80ad43b4b94d3dfefe9ed09e5d742385cb36f"} Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.557154 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:01:26 crc kubenswrapper[4605]: I1001 14:01:26.684466 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-l69r5"] Oct 01 14:01:26 crc kubenswrapper[4605]: W1001 14:01:26.700101 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9489bed_ef81_40d8_8a3e_1cc162ced1b6.slice/crio-58d23a4d7724bf84d2bc846d3190762454856c2397f434b32bfaafb5a7fb9f38 WatchSource:0}: Error finding container 58d23a4d7724bf84d2bc846d3190762454856c2397f434b32bfaafb5a7fb9f38: Status 404 returned error can't find the container with id 58d23a4d7724bf84d2bc846d3190762454856c2397f434b32bfaafb5a7fb9f38 Oct 01 14:01:27 crc kubenswrapper[4605]: I1001 14:01:27.252793 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l69r5" event={"ID":"a9489bed-ef81-40d8-8a3e-1cc162ced1b6","Type":"ContainerStarted","Data":"58d23a4d7724bf84d2bc846d3190762454856c2397f434b32bfaafb5a7fb9f38"} Oct 01 14:01:28 crc kubenswrapper[4605]: I1001 14:01:28.264417 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"77d58badd3c965282b35ca8692a2ee8358a5877e7352ac0144b24985e81d791b"} Oct 01 14:01:28 crc kubenswrapper[4605]: I1001 14:01:28.264720 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"a7039b0f1886123bb5b3bdd053cebd3c25de601e67ef2d6ef7b59952075c1c13"} Oct 01 14:01:28 crc kubenswrapper[4605]: I1001 14:01:28.264730 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"93c932fe2ec9f2ad4f9401e4b390c45dff99e4a9386a93f09dfe8f71b04c974f"} Oct 01 14:01:29 crc kubenswrapper[4605]: I1001 14:01:29.273535 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"904e37dc5c3139492f7279d834e0a681f3b440820ed330a4ce5212f7e621d449"} Oct 01 14:01:30 crc kubenswrapper[4605]: I1001 14:01:30.288229 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"f82412980c467b5c453b51545aaa33946264af06fa1fe0db4117de4d366a34b0"} Oct 01 14:01:30 crc 
kubenswrapper[4605]: I1001 14:01:30.288672 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"ab0631172fcd1a91fd3fa8e42efd07ccd0115d2446bbedbf83213f0616f9e74c"} Oct 01 14:01:30 crc kubenswrapper[4605]: I1001 14:01:30.288683 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"5aa9d2af74035204da872a70aca91c3639370ac51dc310898929c2ad5f796fa3"} Oct 01 14:01:30 crc kubenswrapper[4605]: I1001 14:01:30.931448 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jvb44" podUID="a37367ae-0e7b-4ad1-afb4-c48ca6282706" containerName="ovn-controller" probeResult="failure" output=< Oct 01 14:01:30 crc kubenswrapper[4605]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 01 14:01:30 crc kubenswrapper[4605]: > Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.046327 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.088850 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-csqtk" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.291201 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jvb44-config-jw27n"] Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.292748 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.295968 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.306939 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"48cadef1af21d2d583a90b2d913324ab2234ef431351305f1562c495a19ac4e8"} Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.312651 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jvb44-config-jw27n"] Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.377113 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-log-ovn\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.377433 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-scripts\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.377637 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run-ovn\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: 
\"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.377822 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.377989 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-894vs\" (UniqueName: \"kubernetes.io/projected/048ad14f-a171-486c-95c6-41b6ad3dbdae-kube-api-access-894vs\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.378208 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-additional-scripts\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.479237 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-log-ovn\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.479289 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-scripts\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.479323 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run-ovn\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.479362 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.479418 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-894vs\" (UniqueName: \"kubernetes.io/projected/048ad14f-a171-486c-95c6-41b6ad3dbdae-kube-api-access-894vs\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.479476 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-additional-scripts\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.480110 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run-ovn\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.480322 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-log-ovn\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.480396 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-additional-scripts\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.480454 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.482568 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-scripts\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.519073 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-894vs\" (UniqueName: \"kubernetes.io/projected/048ad14f-a171-486c-95c6-41b6ad3dbdae-kube-api-access-894vs\") pod \"ovn-controller-jvb44-config-jw27n\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:31 crc kubenswrapper[4605]: I1001 14:01:31.624475 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:32 crc kubenswrapper[4605]: I1001 14:01:32.138640 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jvb44-config-jw27n"] Oct 01 14:01:32 crc kubenswrapper[4605]: W1001 14:01:32.147593 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod048ad14f_a171_486c_95c6_41b6ad3dbdae.slice/crio-3da4a8c7c3a62aa40b2cce1c80442d2cc1e89d277b89f6df5013058b4b81641a WatchSource:0}: Error finding container 3da4a8c7c3a62aa40b2cce1c80442d2cc1e89d277b89f6df5013058b4b81641a: Status 404 returned error can't find the container with id 3da4a8c7c3a62aa40b2cce1c80442d2cc1e89d277b89f6df5013058b4b81641a Oct 01 14:01:32 crc kubenswrapper[4605]: I1001 14:01:32.332027 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jvb44-config-jw27n" event={"ID":"048ad14f-a171-486c-95c6-41b6ad3dbdae","Type":"ContainerStarted","Data":"3da4a8c7c3a62aa40b2cce1c80442d2cc1e89d277b89f6df5013058b4b81641a"} Oct 01 14:01:33 crc kubenswrapper[4605]: I1001 14:01:33.347048 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"bdd7b624443b1b2ae036625c27e4e2fd1d0cd9c5791e6dda14b6f7e1f6fdcf5f"} Oct 01 14:01:33 crc kubenswrapper[4605]: I1001 14:01:33.347464 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"049bdc00b6398e48aa329a3f2a80e71ed08792e316dd7490b683f217b956bd4f"} Oct 01 14:01:33 crc kubenswrapper[4605]: I1001 14:01:33.348634 4605 generic.go:334] "Generic (PLEG): container finished" podID="048ad14f-a171-486c-95c6-41b6ad3dbdae" containerID="78d7b90a8edcea3607571e8dba0ff74ff511761fe7286bbaefd33115b3a5766b" exitCode=0 Oct 01 14:01:33 crc kubenswrapper[4605]: I1001 14:01:33.348671 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jvb44-config-jw27n" event={"ID":"048ad14f-a171-486c-95c6-41b6ad3dbdae","Type":"ContainerDied","Data":"78d7b90a8edcea3607571e8dba0ff74ff511761fe7286bbaefd33115b3a5766b"} Oct 01 14:01:34 crc kubenswrapper[4605]: I1001 14:01:34.362290 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"540c9569b06f9b314a077e56e3a5ae70ccd21cd50fdb55f0132ae7eb8666f198"} Oct 01 14:01:34 crc kubenswrapper[4605]: I1001 14:01:34.362586 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"a1e76502c86d37de3146ed948c7971117c87e149636272d6998959daf14c8c54"} Oct 01 14:01:35 crc kubenswrapper[4605]: I1001 14:01:35.925940 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jvb44" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.242985 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.650722 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-9fz97"] Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.652071 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.671836 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9fz97"] Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.780209 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn68r\" (UniqueName: \"kubernetes.io/projected/92500ff4-ab25-4756-831e-b32aba1e71ff-kube-api-access-dn68r\") pod \"cinder-db-create-9fz97\" (UID: \"92500ff4-ab25-4756-831e-b32aba1e71ff\") " pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.798826 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-gcgcz"] Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.800866 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.817185 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-gcgcz"] Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.897479 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn68r\" (UniqueName: \"kubernetes.io/projected/92500ff4-ab25-4756-831e-b32aba1e71ff-kube-api-access-dn68r\") pod \"cinder-db-create-9fz97\" (UID: \"92500ff4-ab25-4756-831e-b32aba1e71ff\") " pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.897590 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gwt4\" (UniqueName: \"kubernetes.io/projected/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43-kube-api-access-5gwt4\") pod \"barbican-db-create-gcgcz\" (UID: \"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43\") " pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.936086 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn68r\" (UniqueName: \"kubernetes.io/projected/92500ff4-ab25-4756-831e-b32aba1e71ff-kube-api-access-dn68r\") pod \"cinder-db-create-9fz97\" (UID: \"92500ff4-ab25-4756-831e-b32aba1e71ff\") " pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.967389 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.983582 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rrkt4"] Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.984622 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:36 crc kubenswrapper[4605]: I1001 14:01:36.998652 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gwt4\" (UniqueName: \"kubernetes.io/projected/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43-kube-api-access-5gwt4\") pod \"barbican-db-create-gcgcz\" (UID: \"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43\") " pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.005563 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rrkt4"] Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.051382 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gwt4\" (UniqueName: \"kubernetes.io/projected/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43-kube-api-access-5gwt4\") pod \"barbican-db-create-gcgcz\" (UID: \"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43\") " pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.091499 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-86rqc"] Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.092634 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.096774 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.097805 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7vm5" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.099063 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.099766 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.100694 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfmtb\" (UniqueName: \"kubernetes.io/projected/158e3a16-bd4b-45e2-be45-e8f36efc579d-kube-api-access-pfmtb\") pod \"neutron-db-create-rrkt4\" (UID: \"158e3a16-bd4b-45e2-be45-e8f36efc579d\") " pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.110093 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-86rqc"] Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.162580 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.202837 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-combined-ca-bundle\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.202904 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfmtb\" (UniqueName: \"kubernetes.io/projected/158e3a16-bd4b-45e2-be45-e8f36efc579d-kube-api-access-pfmtb\") pod \"neutron-db-create-rrkt4\" (UID: \"158e3a16-bd4b-45e2-be45-e8f36efc579d\") " pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.202938 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brtd2\" (UniqueName: \"kubernetes.io/projected/a204e126-3626-4105-840d-85d43b095f8b-kube-api-access-brtd2\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.202966 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-config-data\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.220806 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfmtb\" (UniqueName: \"kubernetes.io/projected/158e3a16-bd4b-45e2-be45-e8f36efc579d-kube-api-access-pfmtb\") pod \"neutron-db-create-rrkt4\" (UID: \"158e3a16-bd4b-45e2-be45-e8f36efc579d\") " pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.302340 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.304604 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-combined-ca-bundle\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.304664 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brtd2\" (UniqueName: \"kubernetes.io/projected/a204e126-3626-4105-840d-85d43b095f8b-kube-api-access-brtd2\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.304692 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-config-data\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.309248 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-combined-ca-bundle\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.320083 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-config-data\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.330115 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brtd2\" (UniqueName: \"kubernetes.io/projected/a204e126-3626-4105-840d-85d43b095f8b-kube-api-access-brtd2\") pod \"keystone-db-sync-86rqc\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:37 crc kubenswrapper[4605]: I1001 14:01:37.413079 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.860224 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.996611 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-scripts\") pod \"048ad14f-a171-486c-95c6-41b6ad3dbdae\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.996939 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run\") pod \"048ad14f-a171-486c-95c6-41b6ad3dbdae\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.996961 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-log-ovn\") pod \"048ad14f-a171-486c-95c6-41b6ad3dbdae\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.997029 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run-ovn\") pod \"048ad14f-a171-486c-95c6-41b6ad3dbdae\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.997077 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-894vs\" (UniqueName: \"kubernetes.io/projected/048ad14f-a171-486c-95c6-41b6ad3dbdae-kube-api-access-894vs\") pod \"048ad14f-a171-486c-95c6-41b6ad3dbdae\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.997163 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-additional-scripts\") pod \"048ad14f-a171-486c-95c6-41b6ad3dbdae\" (UID: \"048ad14f-a171-486c-95c6-41b6ad3dbdae\") " Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.997601 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "048ad14f-a171-486c-95c6-41b6ad3dbdae" (UID: "048ad14f-a171-486c-95c6-41b6ad3dbdae"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.998767 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-scripts" (OuterVolumeSpecName: "scripts") pod "048ad14f-a171-486c-95c6-41b6ad3dbdae" (UID: "048ad14f-a171-486c-95c6-41b6ad3dbdae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.998789 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "048ad14f-a171-486c-95c6-41b6ad3dbdae" (UID: "048ad14f-a171-486c-95c6-41b6ad3dbdae"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.998798 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run" (OuterVolumeSpecName: "var-run") pod "048ad14f-a171-486c-95c6-41b6ad3dbdae" (UID: "048ad14f-a171-486c-95c6-41b6ad3dbdae"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:01:41 crc kubenswrapper[4605]: I1001 14:01:41.999241 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "048ad14f-a171-486c-95c6-41b6ad3dbdae" (UID: "048ad14f-a171-486c-95c6-41b6ad3dbdae"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.006380 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/048ad14f-a171-486c-95c6-41b6ad3dbdae-kube-api-access-894vs" (OuterVolumeSpecName: "kube-api-access-894vs") pod "048ad14f-a171-486c-95c6-41b6ad3dbdae" (UID: "048ad14f-a171-486c-95c6-41b6ad3dbdae"). InnerVolumeSpecName "kube-api-access-894vs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.099704 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.099800 4605 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.099837 4605 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.099889 4605 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/048ad14f-a171-486c-95c6-41b6ad3dbdae-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.099901 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-894vs\" (UniqueName: \"kubernetes.io/projected/048ad14f-a171-486c-95c6-41b6ad3dbdae-kube-api-access-894vs\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.099962 4605 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/048ad14f-a171-486c-95c6-41b6ad3dbdae-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.207148 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rrkt4"] Oct 01 14:01:42 crc kubenswrapper[4605]: W1001 14:01:42.211318 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod158e3a16_bd4b_45e2_be45_e8f36efc579d.slice/crio-efb1419abc200919507641f924495db94e6662fecdc567c1c290eeea2ac4e826 WatchSource:0}: Error finding container 
efb1419abc200919507641f924495db94e6662fecdc567c1c290eeea2ac4e826: Status 404 returned error can't find the container with id efb1419abc200919507641f924495db94e6662fecdc567c1c290eeea2ac4e826 Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.310562 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-86rqc"] Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.432462 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrkt4" event={"ID":"158e3a16-bd4b-45e2-be45-e8f36efc579d","Type":"ContainerStarted","Data":"71654a37e926f58fb41b42b8f741d52e76797cfe45508d713c92ad418aa8d09d"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.432525 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrkt4" event={"ID":"158e3a16-bd4b-45e2-be45-e8f36efc579d","Type":"ContainerStarted","Data":"efb1419abc200919507641f924495db94e6662fecdc567c1c290eeea2ac4e826"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.448689 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"3b1251f6d86ad2d40f82877ff706b50f118ddb20b797d363c1195d9cbb0d8046"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.448738 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"4d4e495d90e57aa84efcfcdf2b9d7ee3509a3fe5d2164d0701b92c6cf7e439d7"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.448748 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"fedbab19-fa82-4d92-b787-de85226cd34f","Type":"ContainerStarted","Data":"07e70c6c5ecce32c26822bff93c006c02e7ab817f6fc973cb784af22270c0d31"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.451969 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-86rqc" event={"ID":"a204e126-3626-4105-840d-85d43b095f8b","Type":"ContainerStarted","Data":"441081bb25b1f7185bb9227d3b366c1c967805e36e3e206346b4a5012082058f"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.459605 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jvb44-config-jw27n" event={"ID":"048ad14f-a171-486c-95c6-41b6ad3dbdae","Type":"ContainerDied","Data":"3da4a8c7c3a62aa40b2cce1c80442d2cc1e89d277b89f6df5013058b4b81641a"} Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.459639 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3da4a8c7c3a62aa40b2cce1c80442d2cc1e89d277b89f6df5013058b4b81641a" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.459696 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jvb44-config-jw27n" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.464617 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-rrkt4" podStartSLOduration=6.46459448 podStartE2EDuration="6.46459448s" podCreationTimestamp="2025-10-01 14:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:01:42.454217917 +0000 UTC m=+1025.198194135" watchObservedRunningTime="2025-10-01 14:01:42.46459448 +0000 UTC m=+1025.208570688" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.499462 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=43.771836637 podStartE2EDuration="50.499437385s" podCreationTimestamp="2025-10-01 14:00:52 +0000 UTC" firstStartedPulling="2025-10-01 14:01:26.160505493 +0000 UTC m=+1008.904481701" lastFinishedPulling="2025-10-01 14:01:32.888106241 +0000 UTC m=+1015.632082449" observedRunningTime="2025-10-01 14:01:42.495569216 +0000 UTC m=+1025.239545434" watchObservedRunningTime="2025-10-01 14:01:42.499437385 +0000 UTC m=+1025.243413603" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.762674 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-gxlcx"] Oct 01 14:01:42 crc kubenswrapper[4605]: E1001 14:01:42.763139 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="048ad14f-a171-486c-95c6-41b6ad3dbdae" containerName="ovn-config" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.763154 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="048ad14f-a171-486c-95c6-41b6ad3dbdae" containerName="ovn-config" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.763348 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="048ad14f-a171-486c-95c6-41b6ad3dbdae" containerName="ovn-config" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.779548 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.784791 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.832179 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-gxlcx"] Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.914054 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.915479 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.915606 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-config\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.915764 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.915874 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.916019 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdm8f\" (UniqueName: \"kubernetes.io/projected/bd54105c-5b1e-4706-9fe8-83cdca837998-kube-api-access-xdm8f\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.960675 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jvb44-config-jw27n"] Oct 01 14:01:42 crc kubenswrapper[4605]: I1001 14:01:42.968025 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jvb44-config-jw27n"] Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.023277 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " 
pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.023341 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.023371 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-config\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.023412 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.023445 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.023484 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdm8f\" (UniqueName: \"kubernetes.io/projected/bd54105c-5b1e-4706-9fe8-83cdca837998-kube-api-access-xdm8f\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.024208 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.024312 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.025119 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-config\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.026025 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 
14:01:43.026745 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.049386 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdm8f\" (UniqueName: \"kubernetes.io/projected/bd54105c-5b1e-4706-9fe8-83cdca837998-kube-api-access-xdm8f\") pod \"dnsmasq-dns-77585f5f8c-gxlcx\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.071228 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9fz97"] Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.097212 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-gcgcz"] Oct 01 14:01:43 crc kubenswrapper[4605]: W1001 14:01:43.114733 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06a3c21a_ca5f_44a9_a6da_ea4ddb772f43.slice/crio-8a2d414dfd664daee3c9f917450deaa947cfe8c500eaa21d51f8a026494eb326 WatchSource:0}: Error finding container 8a2d414dfd664daee3c9f917450deaa947cfe8c500eaa21d51f8a026494eb326: Status 404 returned error can't find the container with id 8a2d414dfd664daee3c9f917450deaa947cfe8c500eaa21d51f8a026494eb326 Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.122709 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.472784 4605 generic.go:334] "Generic (PLEG): container finished" podID="92500ff4-ab25-4756-831e-b32aba1e71ff" containerID="f1f86ad225db6bc5faf7414010774230b9ccc60221c1d46a59daf167676f1ce6" exitCode=0 Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.473546 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9fz97" event={"ID":"92500ff4-ab25-4756-831e-b32aba1e71ff","Type":"ContainerDied","Data":"f1f86ad225db6bc5faf7414010774230b9ccc60221c1d46a59daf167676f1ce6"} Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.473607 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9fz97" event={"ID":"92500ff4-ab25-4756-831e-b32aba1e71ff","Type":"ContainerStarted","Data":"2889f637eaa3d89d35fae94ae079756d8adab47774e95d26bf0b24e38a694568"} Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.476814 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l69r5" event={"ID":"a9489bed-ef81-40d8-8a3e-1cc162ced1b6","Type":"ContainerStarted","Data":"b4e365d933fae88e574955c8f9db28bf719af1d77a020daeb9f4f55ae9f7eea1"} Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.479917 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gcgcz" event={"ID":"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43","Type":"ContainerStarted","Data":"8ed9e1de2256f8929e97adf8a7f228aadc7ec923bca05813cc8b7a4001d8d274"} Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.479949 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gcgcz" 
event={"ID":"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43","Type":"ContainerStarted","Data":"8a2d414dfd664daee3c9f917450deaa947cfe8c500eaa21d51f8a026494eb326"} Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.483729 4605 generic.go:334] "Generic (PLEG): container finished" podID="158e3a16-bd4b-45e2-be45-e8f36efc579d" containerID="71654a37e926f58fb41b42b8f741d52e76797cfe45508d713c92ad418aa8d09d" exitCode=0 Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.485333 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrkt4" event={"ID":"158e3a16-bd4b-45e2-be45-e8f36efc579d","Type":"ContainerDied","Data":"71654a37e926f58fb41b42b8f741d52e76797cfe45508d713c92ad418aa8d09d"} Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.505864 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-gcgcz" podStartSLOduration=7.505822569 podStartE2EDuration="7.505822569s" podCreationTimestamp="2025-10-01 14:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:01:43.503455179 +0000 UTC m=+1026.247431397" watchObservedRunningTime="2025-10-01 14:01:43.505822569 +0000 UTC m=+1026.249798787" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.543414 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-l69r5" podStartSLOduration=3.509989696 podStartE2EDuration="18.543397573s" podCreationTimestamp="2025-10-01 14:01:25 +0000 UTC" firstStartedPulling="2025-10-01 14:01:26.701596982 +0000 UTC m=+1009.445573190" lastFinishedPulling="2025-10-01 14:01:41.735004859 +0000 UTC m=+1024.478981067" observedRunningTime="2025-10-01 14:01:43.538764195 +0000 UTC m=+1026.282740403" watchObservedRunningTime="2025-10-01 14:01:43.543397573 +0000 UTC m=+1026.287373781" Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.728126 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-gxlcx"] Oct 01 14:01:43 crc kubenswrapper[4605]: I1001 14:01:43.940654 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="048ad14f-a171-486c-95c6-41b6ad3dbdae" path="/var/lib/kubelet/pods/048ad14f-a171-486c-95c6-41b6ad3dbdae/volumes" Oct 01 14:01:44 crc kubenswrapper[4605]: I1001 14:01:44.496974 4605 generic.go:334] "Generic (PLEG): container finished" podID="06a3c21a-ca5f-44a9-a6da-ea4ddb772f43" containerID="8ed9e1de2256f8929e97adf8a7f228aadc7ec923bca05813cc8b7a4001d8d274" exitCode=0 Oct 01 14:01:44 crc kubenswrapper[4605]: I1001 14:01:44.497054 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gcgcz" event={"ID":"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43","Type":"ContainerDied","Data":"8ed9e1de2256f8929e97adf8a7f228aadc7ec923bca05813cc8b7a4001d8d274"} Oct 01 14:01:44 crc kubenswrapper[4605]: I1001 14:01:44.499722 4605 generic.go:334] "Generic (PLEG): container finished" podID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerID="aa2f75a0327a9c1e41e6834536f7d410fc04a8a2f2efae406b5eac1f46917f16" exitCode=0 Oct 01 14:01:44 crc kubenswrapper[4605]: I1001 14:01:44.500279 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" event={"ID":"bd54105c-5b1e-4706-9fe8-83cdca837998","Type":"ContainerDied","Data":"aa2f75a0327a9c1e41e6834536f7d410fc04a8a2f2efae406b5eac1f46917f16"} Oct 01 14:01:44 crc kubenswrapper[4605]: I1001 14:01:44.500304 4605 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" event={"ID":"bd54105c-5b1e-4706-9fe8-83cdca837998","Type":"ContainerStarted","Data":"635539bf9bc86d34f11d4bad9dcb4a13fddc776c030942d1ffcfae06435a8112"} Oct 01 14:01:45 crc kubenswrapper[4605]: I1001 14:01:45.511316 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" event={"ID":"bd54105c-5b1e-4706-9fe8-83cdca837998","Type":"ContainerStarted","Data":"3511825df56b94b1ff17ea8a204fc8b39957674d6f9b10331cbcbacaa129dfb9"} Oct 01 14:01:45 crc kubenswrapper[4605]: I1001 14:01:45.513109 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:45 crc kubenswrapper[4605]: I1001 14:01:45.536211 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podStartSLOduration=3.536192215 podStartE2EDuration="3.536192215s" podCreationTimestamp="2025-10-01 14:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:01:45.535037286 +0000 UTC m=+1028.279013484" watchObservedRunningTime="2025-10-01 14:01:45.536192215 +0000 UTC m=+1028.280168443" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.441537 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.451550 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.463189 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.533004 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9fz97" event={"ID":"92500ff4-ab25-4756-831e-b32aba1e71ff","Type":"ContainerDied","Data":"2889f637eaa3d89d35fae94ae079756d8adab47774e95d26bf0b24e38a694568"} Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.533048 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2889f637eaa3d89d35fae94ae079756d8adab47774e95d26bf0b24e38a694568" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.533079 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9fz97" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.534586 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gcgcz" event={"ID":"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43","Type":"ContainerDied","Data":"8a2d414dfd664daee3c9f917450deaa947cfe8c500eaa21d51f8a026494eb326"} Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.534658 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a2d414dfd664daee3c9f917450deaa947cfe8c500eaa21d51f8a026494eb326" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.534719 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gcgcz" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.536364 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rrkt4" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.536358 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rrkt4" event={"ID":"158e3a16-bd4b-45e2-be45-e8f36efc579d","Type":"ContainerDied","Data":"efb1419abc200919507641f924495db94e6662fecdc567c1c290eeea2ac4e826"} Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.536483 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efb1419abc200919507641f924495db94e6662fecdc567c1c290eeea2ac4e826" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.603932 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfmtb\" (UniqueName: \"kubernetes.io/projected/158e3a16-bd4b-45e2-be45-e8f36efc579d-kube-api-access-pfmtb\") pod \"158e3a16-bd4b-45e2-be45-e8f36efc579d\" (UID: \"158e3a16-bd4b-45e2-be45-e8f36efc579d\") " Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.604041 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gwt4\" (UniqueName: \"kubernetes.io/projected/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43-kube-api-access-5gwt4\") pod \"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43\" (UID: \"06a3c21a-ca5f-44a9-a6da-ea4ddb772f43\") " Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.604398 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn68r\" (UniqueName: \"kubernetes.io/projected/92500ff4-ab25-4756-831e-b32aba1e71ff-kube-api-access-dn68r\") pod \"92500ff4-ab25-4756-831e-b32aba1e71ff\" (UID: \"92500ff4-ab25-4756-831e-b32aba1e71ff\") " Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.608251 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92500ff4-ab25-4756-831e-b32aba1e71ff-kube-api-access-dn68r" (OuterVolumeSpecName: "kube-api-access-dn68r") pod "92500ff4-ab25-4756-831e-b32aba1e71ff" (UID: "92500ff4-ab25-4756-831e-b32aba1e71ff"). InnerVolumeSpecName "kube-api-access-dn68r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.611589 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43-kube-api-access-5gwt4" (OuterVolumeSpecName: "kube-api-access-5gwt4") pod "06a3c21a-ca5f-44a9-a6da-ea4ddb772f43" (UID: "06a3c21a-ca5f-44a9-a6da-ea4ddb772f43"). InnerVolumeSpecName "kube-api-access-5gwt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.625833 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/158e3a16-bd4b-45e2-be45-e8f36efc579d-kube-api-access-pfmtb" (OuterVolumeSpecName: "kube-api-access-pfmtb") pod "158e3a16-bd4b-45e2-be45-e8f36efc579d" (UID: "158e3a16-bd4b-45e2-be45-e8f36efc579d"). InnerVolumeSpecName "kube-api-access-pfmtb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.708172 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn68r\" (UniqueName: \"kubernetes.io/projected/92500ff4-ab25-4756-831e-b32aba1e71ff-kube-api-access-dn68r\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.713400 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfmtb\" (UniqueName: \"kubernetes.io/projected/158e3a16-bd4b-45e2-be45-e8f36efc579d-kube-api-access-pfmtb\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:47 crc kubenswrapper[4605]: I1001 14:01:47.713452 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gwt4\" (UniqueName: \"kubernetes.io/projected/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43-kube-api-access-5gwt4\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:48 crc kubenswrapper[4605]: I1001 14:01:48.552013 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-86rqc" event={"ID":"a204e126-3626-4105-840d-85d43b095f8b","Type":"ContainerStarted","Data":"c375d927f3cadfd99e69c736202c7116257743c6a3b03d725027db35f7063dc9"} Oct 01 14:01:48 crc kubenswrapper[4605]: I1001 14:01:48.573316 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-86rqc" podStartSLOduration=6.591872432 podStartE2EDuration="11.573299524s" podCreationTimestamp="2025-10-01 14:01:37 +0000 UTC" firstStartedPulling="2025-10-01 14:01:42.322242159 +0000 UTC m=+1025.066218367" lastFinishedPulling="2025-10-01 14:01:47.303669251 +0000 UTC m=+1030.047645459" observedRunningTime="2025-10-01 14:01:48.571285553 +0000 UTC m=+1031.315261761" watchObservedRunningTime="2025-10-01 14:01:48.573299524 +0000 UTC m=+1031.317275732" Oct 01 14:01:53 crc kubenswrapper[4605]: I1001 14:01:53.123999 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:01:53 crc kubenswrapper[4605]: I1001 14:01:53.182010 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-l45gl"] Oct 01 14:01:53 crc kubenswrapper[4605]: I1001 14:01:53.182301 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-l45gl" podUID="b7288927-685b-405e-89a7-439e0f377750" containerName="dnsmasq-dns" containerID="cri-o://cf102b919f69638ceae752f31f439b4cb5183e2e1dff48390a33c1e2c8a81d88" gracePeriod=10 Oct 01 14:01:53 crc kubenswrapper[4605]: I1001 14:01:53.606168 4605 generic.go:334] "Generic (PLEG): container finished" podID="b7288927-685b-405e-89a7-439e0f377750" containerID="cf102b919f69638ceae752f31f439b4cb5183e2e1dff48390a33c1e2c8a81d88" exitCode=0 Oct 01 14:01:53 crc kubenswrapper[4605]: I1001 14:01:53.606229 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-l45gl" event={"ID":"b7288927-685b-405e-89a7-439e0f377750","Type":"ContainerDied","Data":"cf102b919f69638ceae752f31f439b4cb5183e2e1dff48390a33c1e2c8a81d88"} Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.140881 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-l45gl" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.315927 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-sb\") pod \"b7288927-685b-405e-89a7-439e0f377750\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.316012 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrtc7\" (UniqueName: \"kubernetes.io/projected/b7288927-685b-405e-89a7-439e0f377750-kube-api-access-hrtc7\") pod \"b7288927-685b-405e-89a7-439e0f377750\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.316075 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-nb\") pod \"b7288927-685b-405e-89a7-439e0f377750\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.316153 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-config\") pod \"b7288927-685b-405e-89a7-439e0f377750\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.316293 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-dns-svc\") pod \"b7288927-685b-405e-89a7-439e0f377750\" (UID: \"b7288927-685b-405e-89a7-439e0f377750\") " Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.324076 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7288927-685b-405e-89a7-439e0f377750-kube-api-access-hrtc7" (OuterVolumeSpecName: "kube-api-access-hrtc7") pod "b7288927-685b-405e-89a7-439e0f377750" (UID: "b7288927-685b-405e-89a7-439e0f377750"). InnerVolumeSpecName "kube-api-access-hrtc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.373863 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b7288927-685b-405e-89a7-439e0f377750" (UID: "b7288927-685b-405e-89a7-439e0f377750"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.375545 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b7288927-685b-405e-89a7-439e0f377750" (UID: "b7288927-685b-405e-89a7-439e0f377750"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.396329 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-config" (OuterVolumeSpecName: "config") pod "b7288927-685b-405e-89a7-439e0f377750" (UID: "b7288927-685b-405e-89a7-439e0f377750"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.419074 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.419122 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrtc7\" (UniqueName: \"kubernetes.io/projected/b7288927-685b-405e-89a7-439e0f377750-kube-api-access-hrtc7\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.419134 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.419143 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.419640 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b7288927-685b-405e-89a7-439e0f377750" (UID: "b7288927-685b-405e-89a7-439e0f377750"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.520921 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7288927-685b-405e-89a7-439e0f377750-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.614063 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-l45gl" event={"ID":"b7288927-685b-405e-89a7-439e0f377750","Type":"ContainerDied","Data":"c8cc696bcd70110eea95c035b15222e9f75a53393f1e1a6d8938cb67f665c153"} Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.614111 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-l45gl" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.614134 4605 scope.go:117] "RemoveContainer" containerID="cf102b919f69638ceae752f31f439b4cb5183e2e1dff48390a33c1e2c8a81d88" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.635196 4605 scope.go:117] "RemoveContainer" containerID="f1a7608914d7a5d4ccdb930db32904c77c0113e0fc1cfac288997fbc328d8715" Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.649585 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-l45gl"] Oct 01 14:01:54 crc kubenswrapper[4605]: I1001 14:01:54.657194 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-l45gl"] Oct 01 14:01:55 crc kubenswrapper[4605]: I1001 14:01:55.937237 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7288927-685b-405e-89a7-439e0f377750" path="/var/lib/kubelet/pods/b7288927-685b-405e-89a7-439e0f377750/volumes" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.685885 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2e10-account-create-57khj"] Oct 01 14:01:56 crc kubenswrapper[4605]: E1001 14:01:56.686214 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92500ff4-ab25-4756-831e-b32aba1e71ff" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686230 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="92500ff4-ab25-4756-831e-b32aba1e71ff" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: E1001 14:01:56.686246 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a3c21a-ca5f-44a9-a6da-ea4ddb772f43" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686254 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a3c21a-ca5f-44a9-a6da-ea4ddb772f43" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: E1001 14:01:56.686266 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7288927-685b-405e-89a7-439e0f377750" containerName="dnsmasq-dns" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686272 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7288927-685b-405e-89a7-439e0f377750" containerName="dnsmasq-dns" Oct 01 14:01:56 crc kubenswrapper[4605]: E1001 14:01:56.686280 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158e3a16-bd4b-45e2-be45-e8f36efc579d" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686285 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="158e3a16-bd4b-45e2-be45-e8f36efc579d" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: E1001 14:01:56.686294 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7288927-685b-405e-89a7-439e0f377750" containerName="init" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686301 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7288927-685b-405e-89a7-439e0f377750" containerName="init" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686450 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="158e3a16-bd4b-45e2-be45-e8f36efc579d" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686479 4605 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="92500ff4-ab25-4756-831e-b32aba1e71ff" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686491 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a3c21a-ca5f-44a9-a6da-ea4ddb772f43" containerName="mariadb-database-create" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.686502 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7288927-685b-405e-89a7-439e0f377750" containerName="dnsmasq-dns" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.687014 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.694309 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.696663 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2e10-account-create-57khj"] Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.778212 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-70b3-account-create-kk9zv"] Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.779195 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.785914 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.796344 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-70b3-account-create-kk9zv"] Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.856680 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw4vv\" (UniqueName: \"kubernetes.io/projected/c9b91c60-f218-4df3-ad79-b40f7614f5dd-kube-api-access-cw4vv\") pod \"barbican-2e10-account-create-57khj\" (UID: \"c9b91c60-f218-4df3-ad79-b40f7614f5dd\") " pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.958145 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw4vv\" (UniqueName: \"kubernetes.io/projected/c9b91c60-f218-4df3-ad79-b40f7614f5dd-kube-api-access-cw4vv\") pod \"barbican-2e10-account-create-57khj\" (UID: \"c9b91c60-f218-4df3-ad79-b40f7614f5dd\") " pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.958254 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjxr6\" (UniqueName: \"kubernetes.io/projected/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092-kube-api-access-rjxr6\") pod \"cinder-70b3-account-create-kk9zv\" (UID: \"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092\") " pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.978722 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw4vv\" (UniqueName: \"kubernetes.io/projected/c9b91c60-f218-4df3-ad79-b40f7614f5dd-kube-api-access-cw4vv\") pod \"barbican-2e10-account-create-57khj\" (UID: \"c9b91c60-f218-4df3-ad79-b40f7614f5dd\") " pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:01:56 crc kubenswrapper[4605]: I1001 14:01:56.998844 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-32cc-account-create-cbl5v"] 
Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.000024 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.003075 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.003495 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.017081 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-32cc-account-create-cbl5v"] Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.059757 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjxr6\" (UniqueName: \"kubernetes.io/projected/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092-kube-api-access-rjxr6\") pod \"cinder-70b3-account-create-kk9zv\" (UID: \"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092\") " pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.084246 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjxr6\" (UniqueName: \"kubernetes.io/projected/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092-kube-api-access-rjxr6\") pod \"cinder-70b3-account-create-kk9zv\" (UID: \"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092\") " pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.098430 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.166280 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmhwr\" (UniqueName: \"kubernetes.io/projected/a17b7512-23cb-4c81-a755-788fecd5ece2-kube-api-access-mmhwr\") pod \"neutron-32cc-account-create-cbl5v\" (UID: \"a17b7512-23cb-4c81-a755-788fecd5ece2\") " pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.267482 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmhwr\" (UniqueName: \"kubernetes.io/projected/a17b7512-23cb-4c81-a755-788fecd5ece2-kube-api-access-mmhwr\") pod \"neutron-32cc-account-create-cbl5v\" (UID: \"a17b7512-23cb-4c81-a755-788fecd5ece2\") " pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.290730 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmhwr\" (UniqueName: \"kubernetes.io/projected/a17b7512-23cb-4c81-a755-788fecd5ece2-kube-api-access-mmhwr\") pod \"neutron-32cc-account-create-cbl5v\" (UID: \"a17b7512-23cb-4c81-a755-788fecd5ece2\") " pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.484606 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.506481 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2e10-account-create-57khj"] Oct 01 14:01:57 crc kubenswrapper[4605]: W1001 14:01:57.519295 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9b91c60_f218_4df3_ad79_b40f7614f5dd.slice/crio-778ae29f68a0a64a47a903ea45845a9190cb425f14d006806f2e230edb8d8408 WatchSource:0}: Error finding container 778ae29f68a0a64a47a903ea45845a9190cb425f14d006806f2e230edb8d8408: Status 404 returned error can't find the container with id 778ae29f68a0a64a47a903ea45845a9190cb425f14d006806f2e230edb8d8408 Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.624075 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-70b3-account-create-kk9zv"] Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.639575 4605 generic.go:334] "Generic (PLEG): container finished" podID="a204e126-3626-4105-840d-85d43b095f8b" containerID="c375d927f3cadfd99e69c736202c7116257743c6a3b03d725027db35f7063dc9" exitCode=0 Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.639624 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-86rqc" event={"ID":"a204e126-3626-4105-840d-85d43b095f8b","Type":"ContainerDied","Data":"c375d927f3cadfd99e69c736202c7116257743c6a3b03d725027db35f7063dc9"} Oct 01 14:01:57 crc kubenswrapper[4605]: W1001 14:01:57.653587 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fd1d985_bf1b_4b7e_9df9_f5a3ca363092.slice/crio-faf65302720a8d5983173f3b16ff76513fa48de3c08a0a3dd796b441c5a0da1a WatchSource:0}: Error finding container faf65302720a8d5983173f3b16ff76513fa48de3c08a0a3dd796b441c5a0da1a: Status 404 returned error can't find the container with id faf65302720a8d5983173f3b16ff76513fa48de3c08a0a3dd796b441c5a0da1a Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.661340 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2e10-account-create-57khj" event={"ID":"c9b91c60-f218-4df3-ad79-b40f7614f5dd","Type":"ContainerStarted","Data":"778ae29f68a0a64a47a903ea45845a9190cb425f14d006806f2e230edb8d8408"} Oct 01 14:01:57 crc kubenswrapper[4605]: W1001 14:01:57.935159 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda17b7512_23cb_4c81_a755_788fecd5ece2.slice/crio-6b50013509f37444e811add0fe3454295dc9baa8f7534fc1e24095c1b85fc6d9 WatchSource:0}: Error finding container 6b50013509f37444e811add0fe3454295dc9baa8f7534fc1e24095c1b85fc6d9: Status 404 returned error can't find the container with id 6b50013509f37444e811add0fe3454295dc9baa8f7534fc1e24095c1b85fc6d9 Oct 01 14:01:57 crc kubenswrapper[4605]: I1001 14:01:57.943037 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-32cc-account-create-cbl5v"] Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.670723 4605 generic.go:334] "Generic (PLEG): container finished" podID="a9489bed-ef81-40d8-8a3e-1cc162ced1b6" containerID="b4e365d933fae88e574955c8f9db28bf719af1d77a020daeb9f4f55ae9f7eea1" exitCode=0 Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.670799 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l69r5" 
event={"ID":"a9489bed-ef81-40d8-8a3e-1cc162ced1b6","Type":"ContainerDied","Data":"b4e365d933fae88e574955c8f9db28bf719af1d77a020daeb9f4f55ae9f7eea1"} Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.672213 4605 generic.go:334] "Generic (PLEG): container finished" podID="a17b7512-23cb-4c81-a755-788fecd5ece2" containerID="d5bca519a32c6b26704959dbde2c064e93fc1a192869bc083ddbccb63bd00d41" exitCode=0 Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.672275 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-32cc-account-create-cbl5v" event={"ID":"a17b7512-23cb-4c81-a755-788fecd5ece2","Type":"ContainerDied","Data":"d5bca519a32c6b26704959dbde2c064e93fc1a192869bc083ddbccb63bd00d41"} Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.672294 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-32cc-account-create-cbl5v" event={"ID":"a17b7512-23cb-4c81-a755-788fecd5ece2","Type":"ContainerStarted","Data":"6b50013509f37444e811add0fe3454295dc9baa8f7534fc1e24095c1b85fc6d9"} Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.675677 4605 generic.go:334] "Generic (PLEG): container finished" podID="3fd1d985-bf1b-4b7e-9df9-f5a3ca363092" containerID="9f92cb5c0d44c8434266608af6c70687d04a2eeec764a779f86548a949fc9fbe" exitCode=0 Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.675870 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-70b3-account-create-kk9zv" event={"ID":"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092","Type":"ContainerDied","Data":"9f92cb5c0d44c8434266608af6c70687d04a2eeec764a779f86548a949fc9fbe"} Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.675929 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-70b3-account-create-kk9zv" event={"ID":"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092","Type":"ContainerStarted","Data":"faf65302720a8d5983173f3b16ff76513fa48de3c08a0a3dd796b441c5a0da1a"} Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.681847 4605 generic.go:334] "Generic (PLEG): container finished" podID="c9b91c60-f218-4df3-ad79-b40f7614f5dd" containerID="691f8c0bb7a0d3ce9fe779a35a8a542df3c03aee0ed2bddffb343283c537560d" exitCode=0 Oct 01 14:01:58 crc kubenswrapper[4605]: I1001 14:01:58.682128 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2e10-account-create-57khj" event={"ID":"c9b91c60-f218-4df3-ad79-b40f7614f5dd","Type":"ContainerDied","Data":"691f8c0bb7a0d3ce9fe779a35a8a542df3c03aee0ed2bddffb343283c537560d"} Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.011917 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-86rqc" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.199491 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-combined-ca-bundle\") pod \"a204e126-3626-4105-840d-85d43b095f8b\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.199570 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brtd2\" (UniqueName: \"kubernetes.io/projected/a204e126-3626-4105-840d-85d43b095f8b-kube-api-access-brtd2\") pod \"a204e126-3626-4105-840d-85d43b095f8b\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.199636 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-config-data\") pod \"a204e126-3626-4105-840d-85d43b095f8b\" (UID: \"a204e126-3626-4105-840d-85d43b095f8b\") " Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.210358 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a204e126-3626-4105-840d-85d43b095f8b-kube-api-access-brtd2" (OuterVolumeSpecName: "kube-api-access-brtd2") pod "a204e126-3626-4105-840d-85d43b095f8b" (UID: "a204e126-3626-4105-840d-85d43b095f8b"). InnerVolumeSpecName "kube-api-access-brtd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.228598 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a204e126-3626-4105-840d-85d43b095f8b" (UID: "a204e126-3626-4105-840d-85d43b095f8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.259398 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-config-data" (OuterVolumeSpecName: "config-data") pod "a204e126-3626-4105-840d-85d43b095f8b" (UID: "a204e126-3626-4105-840d-85d43b095f8b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.300940 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.300970 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brtd2\" (UniqueName: \"kubernetes.io/projected/a204e126-3626-4105-840d-85d43b095f8b-kube-api-access-brtd2\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.300980 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a204e126-3626-4105-840d-85d43b095f8b-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.692356 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-86rqc" event={"ID":"a204e126-3626-4105-840d-85d43b095f8b","Type":"ContainerDied","Data":"441081bb25b1f7185bb9227d3b366c1c967805e36e3e206346b4a5012082058f"} Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.692408 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="441081bb25b1f7185bb9227d3b366c1c967805e36e3e206346b4a5012082058f" Oct 01 14:01:59 crc kubenswrapper[4605]: I1001 14:01:59.692492 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-86rqc" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.076168 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-f5jjw"] Oct 01 14:02:00 crc kubenswrapper[4605]: E1001 14:02:00.076774 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a204e126-3626-4105-840d-85d43b095f8b" containerName="keystone-db-sync" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.076793 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a204e126-3626-4105-840d-85d43b095f8b" containerName="keystone-db-sync" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.077000 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a204e126-3626-4105-840d-85d43b095f8b" containerName="keystone-db-sync" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.087133 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.096810 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5qt7p"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.107973 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.113379 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.116262 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.116451 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.116534 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7vm5" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.130330 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-f5jjw"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.190956 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5qt7p"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214135 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fn4m\" (UniqueName: \"kubernetes.io/projected/676e0fa1-794f-4acf-9c92-1405d3fff0f1-kube-api-access-5fn4m\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214189 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-combined-ca-bundle\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214230 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214263 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-fernet-keys\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214284 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-config\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214312 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-credential-keys\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214338 4605 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-config-data\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214359 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w97m4\" (UniqueName: \"kubernetes.io/projected/0aafc049-c1db-4e02-a3fa-07274790c6f6-kube-api-access-w97m4\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214380 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-svc\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214394 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214436 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-scripts\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.214450 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316050 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316131 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-fernet-keys\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316155 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-config\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316185 4605 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-credential-keys\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316213 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-config-data\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316233 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w97m4\" (UniqueName: \"kubernetes.io/projected/0aafc049-c1db-4e02-a3fa-07274790c6f6-kube-api-access-w97m4\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316256 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316270 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-svc\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316310 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-scripts\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316326 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316346 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fn4m\" (UniqueName: \"kubernetes.io/projected/676e0fa1-794f-4acf-9c92-1405d3fff0f1-kube-api-access-5fn4m\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.316370 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-combined-ca-bundle\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.317647 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" 
(UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.319372 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.319891 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-config\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.324379 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.324429 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-svc\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.324914 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-scripts\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.331655 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-combined-ca-bundle\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.334735 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-config-data\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.350676 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-credential-keys\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.352731 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-fernet-keys\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " 
pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.359013 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w97m4\" (UniqueName: \"kubernetes.io/projected/0aafc049-c1db-4e02-a3fa-07274790c6f6-kube-api-access-w97m4\") pod \"keystone-bootstrap-5qt7p\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.374637 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-f76447ddf-ghwkf"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.375948 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.380775 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.380969 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-wpb7w" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.381239 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.381354 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.390000 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f76447ddf-ghwkf"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.405870 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fn4m\" (UniqueName: \"kubernetes.io/projected/676e0fa1-794f-4acf-9c92-1405d3fff0f1-kube-api-access-5fn4m\") pod \"dnsmasq-dns-55fff446b9-f5jjw\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.421564 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.435676 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.496275 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.521559 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-horizon-secret-key\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.521627 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-logs\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.521652 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-scripts\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.521699 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-config-data\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.521721 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlgtj\" (UniqueName: \"kubernetes.io/projected/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-kube-api-access-nlgtj\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.622775 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjxr6\" (UniqueName: \"kubernetes.io/projected/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092-kube-api-access-rjxr6\") pod \"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092\" (UID: \"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092\") " Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.623028 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-config-data\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.623056 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlgtj\" (UniqueName: \"kubernetes.io/projected/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-kube-api-access-nlgtj\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.623134 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-horizon-secret-key\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " 
pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.623173 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-logs\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.623194 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-scripts\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.623825 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-scripts\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.624540 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-config-data\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.627377 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-logs\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.641862 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092-kube-api-access-rjxr6" (OuterVolumeSpecName: "kube-api-access-rjxr6") pod "3fd1d985-bf1b-4b7e-9df9-f5a3ca363092" (UID: "3fd1d985-bf1b-4b7e-9df9-f5a3ca363092"). InnerVolumeSpecName "kube-api-access-rjxr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.648147 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-px7rk"] Oct 01 14:02:00 crc kubenswrapper[4605]: E1001 14:02:00.648559 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd1d985-bf1b-4b7e-9df9-f5a3ca363092" containerName="mariadb-account-create" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.648575 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd1d985-bf1b-4b7e-9df9-f5a3ca363092" containerName="mariadb-account-create" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.648736 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd1d985-bf1b-4b7e-9df9-f5a3ca363092" containerName="mariadb-account-create" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.649294 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.653203 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5d459df97c-xmd6q"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.655022 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-horizon-secret-key\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.655103 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.655191 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.655258 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-5tvxv" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.655690 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.663544 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-px7rk"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.669941 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlgtj\" (UniqueName: \"kubernetes.io/projected/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-kube-api-access-nlgtj\") pod \"horizon-f76447ddf-ghwkf\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.687448 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-f5jjw"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725176 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-horizon-secret-key\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725220 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-logs\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725237 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-logs\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725258 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-scripts\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725290 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkrtd\" (UniqueName: \"kubernetes.io/projected/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-kube-api-access-mkrtd\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725306 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-scripts\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725320 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8nk8\" (UniqueName: \"kubernetes.io/projected/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-kube-api-access-n8nk8\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725367 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-config-data\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725410 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-combined-ca-bundle\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725433 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-config-data\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.725493 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjxr6\" (UniqueName: \"kubernetes.io/projected/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092-kube-api-access-rjxr6\") on node \"crc\" DevicePath \"\""
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.764783 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d459df97c-xmd6q"]
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.781693 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-70b3-account-create-kk9zv" event={"ID":"3fd1d985-bf1b-4b7e-9df9-f5a3ca363092","Type":"ContainerDied","Data":"faf65302720a8d5983173f3b16ff76513fa48de3c08a0a3dd796b441c5a0da1a"}
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.781879 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="faf65302720a8d5983173f3b16ff76513fa48de3c08a0a3dd796b441c5a0da1a"
Need to start a new one" pod="openstack/cinder-70b3-account-create-kk9zv" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.789134 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-ntvfv"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.790648 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.798643 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.831733 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-ntvfv"] Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.839392 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-config-data\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.839624 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-combined-ca-bundle\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.839729 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-config-data\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.839851 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-horizon-secret-key\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.839939 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-logs\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.840001 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-logs\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.840078 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-scripts\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.840209 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkrtd\" (UniqueName: 
\"kubernetes.io/projected/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-kube-api-access-mkrtd\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.840283 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-scripts\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.840358 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8nk8\" (UniqueName: \"kubernetes.io/projected/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-kube-api-access-n8nk8\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.859315 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-scripts\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.861503 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-logs\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.862000 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-logs\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.862425 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-config-data\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.876056 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-config-data\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.882541 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-scripts\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.884705 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkrtd\" (UniqueName: \"kubernetes.io/projected/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-kube-api-access-mkrtd\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.885515 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8nk8\" (UniqueName: \"kubernetes.io/projected/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-kube-api-access-n8nk8\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.906819 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.908994 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.913553 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-combined-ca-bundle\") pod \"placement-db-sync-px7rk\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " pod="openstack/placement-db-sync-px7rk"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.914432 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.914959 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.929470 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-horizon-secret-key\") pod \"horizon-5d459df97c-xmd6q\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " pod="openstack/horizon-5d459df97c-xmd6q"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.949674 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz8xq\" (UniqueName: \"kubernetes.io/projected/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-kube-api-access-fz8xq\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.949763 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-config\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.949850 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv"
Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.949885 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv"
(UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.949954 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.951773 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:02:00 crc kubenswrapper[4605]: I1001 14:02:00.975821 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.003467 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.025303 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.032207 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-l69r5" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062115 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmhwr\" (UniqueName: \"kubernetes.io/projected/a17b7512-23cb-4c81-a755-788fecd5ece2-kube-api-access-mmhwr\") pod \"a17b7512-23cb-4c81-a755-788fecd5ece2\" (UID: \"a17b7512-23cb-4c81-a755-788fecd5ece2\") " Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062454 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062511 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz8xq\" (UniqueName: \"kubernetes.io/projected/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-kube-api-access-fz8xq\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062531 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzsqw\" (UniqueName: \"kubernetes.io/projected/df36773f-c59f-4abb-9adf-20dee81012ae-kube-api-access-fzsqw\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062563 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-run-httpd\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062583 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062626 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-config\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062663 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-config-data\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062700 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-log-httpd\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062724 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062755 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062796 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062819 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.062840 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-scripts\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.064329 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-config\") pod 
\"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.065550 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.072015 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.072592 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.072759 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a17b7512-23cb-4c81-a755-788fecd5ece2-kube-api-access-mmhwr" (OuterVolumeSpecName: "kube-api-access-mmhwr") pod "a17b7512-23cb-4c81-a755-788fecd5ece2" (UID: "a17b7512-23cb-4c81-a755-788fecd5ece2"). InnerVolumeSpecName "kube-api-access-mmhwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.072799 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.072865 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.091676 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz8xq\" (UniqueName: \"kubernetes.io/projected/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-kube-api-access-fz8xq\") pod \"dnsmasq-dns-76fcf4b695-ntvfv\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.160440 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164123 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xlqt\" (UniqueName: \"kubernetes.io/projected/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-kube-api-access-4xlqt\") pod \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164190 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-config-data\") pod \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164231 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw4vv\" (UniqueName: \"kubernetes.io/projected/c9b91c60-f218-4df3-ad79-b40f7614f5dd-kube-api-access-cw4vv\") pod \"c9b91c60-f218-4df3-ad79-b40f7614f5dd\" (UID: \"c9b91c60-f218-4df3-ad79-b40f7614f5dd\") " Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164319 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-combined-ca-bundle\") pod \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164374 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-db-sync-config-data\") pod \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\" (UID: \"a9489bed-ef81-40d8-8a3e-1cc162ced1b6\") " Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164614 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-scripts\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164672 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164701 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzsqw\" (UniqueName: \"kubernetes.io/projected/df36773f-c59f-4abb-9adf-20dee81012ae-kube-api-access-fzsqw\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164722 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-run-httpd\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164737 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164774 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-config-data\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164805 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-log-httpd\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.164850 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmhwr\" (UniqueName: \"kubernetes.io/projected/a17b7512-23cb-4c81-a755-788fecd5ece2-kube-api-access-mmhwr\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.165175 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-log-httpd\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.170377 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-kube-api-access-4xlqt" (OuterVolumeSpecName: "kube-api-access-4xlqt") pod "a9489bed-ef81-40d8-8a3e-1cc162ced1b6" (UID: "a9489bed-ef81-40d8-8a3e-1cc162ced1b6"). InnerVolumeSpecName "kube-api-access-4xlqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.171789 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-run-httpd\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.174374 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-scripts\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.175186 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a9489bed-ef81-40d8-8a3e-1cc162ced1b6" (UID: "a9489bed-ef81-40d8-8a3e-1cc162ced1b6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.181190 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9b91c60-f218-4df3-ad79-b40f7614f5dd-kube-api-access-cw4vv" (OuterVolumeSpecName: "kube-api-access-cw4vv") pod "c9b91c60-f218-4df3-ad79-b40f7614f5dd" (UID: "c9b91c60-f218-4df3-ad79-b40f7614f5dd"). InnerVolumeSpecName "kube-api-access-cw4vv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.181824 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.184787 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.185498 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-config-data\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.199259 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzsqw\" (UniqueName: \"kubernetes.io/projected/df36773f-c59f-4abb-9adf-20dee81012ae-kube-api-access-fzsqw\") pod \"ceilometer-0\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.205073 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9489bed-ef81-40d8-8a3e-1cc162ced1b6" (UID: "a9489bed-ef81-40d8-8a3e-1cc162ced1b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.260053 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.270402 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xlqt\" (UniqueName: \"kubernetes.io/projected/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-kube-api-access-4xlqt\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.270455 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw4vv\" (UniqueName: \"kubernetes.io/projected/c9b91c60-f218-4df3-ad79-b40f7614f5dd-kube-api-access-cw4vv\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.270468 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.270479 4605 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.307325 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-config-data" (OuterVolumeSpecName: "config-data") pod "a9489bed-ef81-40d8-8a3e-1cc162ced1b6" (UID: "a9489bed-ef81-40d8-8a3e-1cc162ced1b6"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.373407 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9489bed-ef81-40d8-8a3e-1cc162ced1b6-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.425815 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5qt7p"] Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.475609 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-f5jjw"] Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.769257 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f76447ddf-ghwkf"] Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.821387 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-l69r5" event={"ID":"a9489bed-ef81-40d8-8a3e-1cc162ced1b6","Type":"ContainerDied","Data":"58d23a4d7724bf84d2bc846d3190762454856c2397f434b32bfaafb5a7fb9f38"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.821425 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58d23a4d7724bf84d2bc846d3190762454856c2397f434b32bfaafb5a7fb9f38" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.821504 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-l69r5" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.835408 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" event={"ID":"676e0fa1-794f-4acf-9c92-1405d3fff0f1","Type":"ContainerStarted","Data":"14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.835446 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" event={"ID":"676e0fa1-794f-4acf-9c92-1405d3fff0f1","Type":"ContainerStarted","Data":"ea47ce33ea57d3abc6a4570814af5dc94e76ae2090cdb57bfd56e5cde4853c4f"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.835553 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" podUID="676e0fa1-794f-4acf-9c92-1405d3fff0f1" containerName="init" containerID="cri-o://14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205" gracePeriod=10 Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.853129 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-32cc-account-create-cbl5v" event={"ID":"a17b7512-23cb-4c81-a755-788fecd5ece2","Type":"ContainerDied","Data":"6b50013509f37444e811add0fe3454295dc9baa8f7534fc1e24095c1b85fc6d9"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.853163 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b50013509f37444e811add0fe3454295dc9baa8f7534fc1e24095c1b85fc6d9" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.853236 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-32cc-account-create-cbl5v" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.863966 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qt7p" event={"ID":"0aafc049-c1db-4e02-a3fa-07274790c6f6","Type":"ContainerStarted","Data":"57cf237c4829da4dbf12faad24a5e9bf332462eb8be5ae8c2d20f53919726afa"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.864006 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qt7p" event={"ID":"0aafc049-c1db-4e02-a3fa-07274790c6f6","Type":"ContainerStarted","Data":"ceb9391974780989995344ac4170c9fff2afb9899976b6a96d1423e77d45fdaf"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.866673 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2e10-account-create-57khj" event={"ID":"c9b91c60-f218-4df3-ad79-b40f7614f5dd","Type":"ContainerDied","Data":"778ae29f68a0a64a47a903ea45845a9190cb425f14d006806f2e230edb8d8408"} Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.866689 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="778ae29f68a0a64a47a903ea45845a9190cb425f14d006806f2e230edb8d8408" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.866738 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2e10-account-create-57khj" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.952973 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5qt7p" podStartSLOduration=1.952951912 podStartE2EDuration="1.952951912s" podCreationTimestamp="2025-10-01 14:02:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:01.914518377 +0000 UTC m=+1044.658494585" watchObservedRunningTime="2025-10-01 14:02:01.952951912 +0000 UTC m=+1044.696928120" Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.966137 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-ntvfv"] Oct 01 14:02:01 crc kubenswrapper[4605]: W1001 14:02:01.974998 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode43f6d4f_3963_49e1_92a3_ee75fcac6e16.slice/crio-d5b1e012b6bc8cc8482c96977c52d6574055e2c3c482f43500fadb588bd7d275 WatchSource:0}: Error finding container d5b1e012b6bc8cc8482c96977c52d6574055e2c3c482f43500fadb588bd7d275: Status 404 returned error can't find the container with id d5b1e012b6bc8cc8482c96977c52d6574055e2c3c482f43500fadb588bd7d275 Oct 01 14:02:01 crc kubenswrapper[4605]: W1001 14:02:01.980544 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cc1af3d_7d60_4b25_9b13_d563d3f0e31b.slice/crio-32b5d36334950886671658fbd63dbbdfd392b4e86fa1c816a2e130481cc76a7d WatchSource:0}: Error finding container 32b5d36334950886671658fbd63dbbdfd392b4e86fa1c816a2e130481cc76a7d: Status 404 returned error can't find the container with id 32b5d36334950886671658fbd63dbbdfd392b4e86fa1c816a2e130481cc76a7d Oct 01 14:02:01 crc kubenswrapper[4605]: W1001 14:02:01.980805 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3bd053b_19f8_4908_b8d5_e5c0ae5599c0.slice/crio-c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e 
Oct 01 14:02:01 crc kubenswrapper[4605]: W1001 14:02:01.980805 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3bd053b_19f8_4908_b8d5_e5c0ae5599c0.slice/crio-c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e WatchSource:0}: Error finding container c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e: Status 404 returned error can't find the container with id c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e
Oct 01 14:02:01 crc kubenswrapper[4605]: I1001 14:02:01.993166 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d459df97c-xmd6q"]
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.022409 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-px7rk"]
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.136212 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-978c2"]
Oct 01 14:02:02 crc kubenswrapper[4605]: E1001 14:02:02.136707 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b91c60-f218-4df3-ad79-b40f7614f5dd" containerName="mariadb-account-create"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.136730 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b91c60-f218-4df3-ad79-b40f7614f5dd" containerName="mariadb-account-create"
Oct 01 14:02:02 crc kubenswrapper[4605]: E1001 14:02:02.136753 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9489bed-ef81-40d8-8a3e-1cc162ced1b6" containerName="glance-db-sync"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.136761 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9489bed-ef81-40d8-8a3e-1cc162ced1b6" containerName="glance-db-sync"
Oct 01 14:02:02 crc kubenswrapper[4605]: E1001 14:02:02.136797 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a17b7512-23cb-4c81-a755-788fecd5ece2" containerName="mariadb-account-create"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.136805 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17b7512-23cb-4c81-a755-788fecd5ece2" containerName="mariadb-account-create"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.137027 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b91c60-f218-4df3-ad79-b40f7614f5dd" containerName="mariadb-account-create"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.137051 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a17b7512-23cb-4c81-a755-788fecd5ece2" containerName="mariadb-account-create"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.137071 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9489bed-ef81-40d8-8a3e-1cc162ced1b6" containerName="glance-db-sync"
Need to start a new one" pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.143219 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.143403 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-7fqq4" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.143639 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.144021 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-978c2"] Oct 01 14:02:02 crc kubenswrapper[4605]: W1001 14:02:02.271253 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf36773f_c59f_4abb_9adf_20dee81012ae.slice/crio-07881f407a2c0424f0d04440674ac8125faf7ce3a119954e51de155b19acccaa WatchSource:0}: Error finding container 07881f407a2c0424f0d04440674ac8125faf7ce3a119954e51de155b19acccaa: Status 404 returned error can't find the container with id 07881f407a2c0424f0d04440674ac8125faf7ce3a119954e51de155b19acccaa Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.271404 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.297617 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-scripts\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.297696 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/287fa988-b116-4b4d-a02c-990e801124d0-etc-machine-id\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.297715 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-db-sync-config-data\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.297780 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-combined-ca-bundle\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.297803 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-config-data\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.297919 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvfhk\" (UniqueName: 
\"kubernetes.io/projected/287fa988-b116-4b4d-a02c-990e801124d0-kube-api-access-bvfhk\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403065 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/287fa988-b116-4b4d-a02c-990e801124d0-etc-machine-id\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403446 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-db-sync-config-data\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403510 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/287fa988-b116-4b4d-a02c-990e801124d0-etc-machine-id\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403513 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-combined-ca-bundle\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403599 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-config-data\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403785 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvfhk\" (UniqueName: \"kubernetes.io/projected/287fa988-b116-4b4d-a02c-990e801124d0-kube-api-access-bvfhk\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.403871 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-scripts\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.407600 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-scripts\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.407769 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-combined-ca-bundle\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2" Oct 01 14:02:02 crc 
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.416211 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-config-data\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.427743 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-db-sync-config-data\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.476649 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvfhk\" (UniqueName: \"kubernetes.io/projected/287fa988-b116-4b4d-a02c-990e801124d0-kube-api-access-bvfhk\") pod \"cinder-db-sync-978c2\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " pod="openstack/cinder-db-sync-978c2"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.764085 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-978c2"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.784515 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw"
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.923538 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-config\") pod \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") "
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.923599 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-sb\") pod \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") "
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.923655 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-svc\") pod \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") "
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.923732 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-nb\") pod \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") "
Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.923766 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fn4m\" (UniqueName: \"kubernetes.io/projected/676e0fa1-794f-4acf-9c92-1405d3fff0f1-kube-api-access-5fn4m\") pod \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\" (UID: \"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") "
\"676e0fa1-794f-4acf-9c92-1405d3fff0f1\") " Oct 01 14:02:02 crc kubenswrapper[4605]: I1001 14:02:02.979931 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-px7rk" event={"ID":"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0","Type":"ContainerStarted","Data":"c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:02.997531 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-ntvfv"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.008767 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/676e0fa1-794f-4acf-9c92-1405d3fff0f1-kube-api-access-5fn4m" (OuterVolumeSpecName: "kube-api-access-5fn4m") pod "676e0fa1-794f-4acf-9c92-1405d3fff0f1" (UID: "676e0fa1-794f-4acf-9c92-1405d3fff0f1"). InnerVolumeSpecName "kube-api-access-5fn4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.015983 4605 generic.go:334] "Generic (PLEG): container finished" podID="676e0fa1-794f-4acf-9c92-1405d3fff0f1" containerID="14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205" exitCode=0 Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.016092 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" event={"ID":"676e0fa1-794f-4acf-9c92-1405d3fff0f1","Type":"ContainerDied","Data":"14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.016134 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" event={"ID":"676e0fa1-794f-4acf-9c92-1405d3fff0f1","Type":"ContainerDied","Data":"ea47ce33ea57d3abc6a4570814af5dc94e76ae2090cdb57bfd56e5cde4853c4f"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.016161 4605 scope.go:117] "RemoveContainer" containerID="14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.016349 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-f5jjw" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.037438 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fn4m\" (UniqueName: \"kubernetes.io/projected/676e0fa1-794f-4acf-9c92-1405d3fff0f1-kube-api-access-5fn4m\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.055804 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "676e0fa1-794f-4acf-9c92-1405d3fff0f1" (UID: "676e0fa1-794f-4acf-9c92-1405d3fff0f1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.056296 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-2dzk7"] Oct 01 14:02:03 crc kubenswrapper[4605]: E1001 14:02:03.057163 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="676e0fa1-794f-4acf-9c92-1405d3fff0f1" containerName="init" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.057187 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="676e0fa1-794f-4acf-9c92-1405d3fff0f1" containerName="init" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.057490 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="676e0fa1-794f-4acf-9c92-1405d3fff0f1" containerName="init" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.067368 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "676e0fa1-794f-4acf-9c92-1405d3fff0f1" (UID: "676e0fa1-794f-4acf-9c92-1405d3fff0f1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.095279 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "676e0fa1-794f-4acf-9c92-1405d3fff0f1" (UID: "676e0fa1-794f-4acf-9c92-1405d3fff0f1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.095615 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d459df97c-xmd6q" event={"ID":"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b","Type":"ContainerStarted","Data":"32b5d36334950886671658fbd63dbbdfd392b4e86fa1c816a2e130481cc76a7d"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.103695 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-2dzk7"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.105011 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.152308 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.152349 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.152359 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.179611 4605 scope.go:117] "RemoveContainer" containerID="14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.187839 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f76447ddf-ghwkf" event={"ID":"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c","Type":"ContainerStarted","Data":"d57d4ebd5550bcd6db311f9d594ec3fd59d181a486b4d6115bc15ebdd83c62b0"} Oct 01 14:02:03 crc kubenswrapper[4605]: E1001 14:02:03.203402 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205\": container with ID starting with 14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205 not found: ID does not exist" containerID="14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.203721 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205"} err="failed to get container status \"14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205\": rpc error: code = NotFound desc = could not find container \"14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205\": container with ID starting with 14967906fa0d64153aa291788e89cc88528af52a8c923ce0574129b00d98f205 not found: ID does not exist" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.216965 4605 generic.go:334] "Generic (PLEG): container finished" podID="e43f6d4f-3963-49e1-92a3-ee75fcac6e16" containerID="3a0e2330fd672d4c6f92c0425625689ac3362c177957531e9e3c3fc1c8a7aef4" exitCode=0 Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.217067 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" event={"ID":"e43f6d4f-3963-49e1-92a3-ee75fcac6e16","Type":"ContainerDied","Data":"3a0e2330fd672d4c6f92c0425625689ac3362c177957531e9e3c3fc1c8a7aef4"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.217099 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" event={"ID":"e43f6d4f-3963-49e1-92a3-ee75fcac6e16","Type":"ContainerStarted","Data":"d5b1e012b6bc8cc8482c96977c52d6574055e2c3c482f43500fadb588bd7d275"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.222060 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-config" (OuterVolumeSpecName: "config") 
pod "676e0fa1-794f-4acf-9c92-1405d3fff0f1" (UID: "676e0fa1-794f-4acf-9c92-1405d3fff0f1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.223252 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "676e0fa1-794f-4acf-9c92-1405d3fff0f1" (UID: "676e0fa1-794f-4acf-9c92-1405d3fff0f1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.238164 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df36773f-c59f-4abb-9adf-20dee81012ae","Type":"ContainerStarted","Data":"07881f407a2c0424f0d04440674ac8125faf7ce3a119954e51de155b19acccaa"} Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253558 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253595 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdxz2\" (UniqueName: \"kubernetes.io/projected/693b90b6-22ae-41d7-8a93-3d854bc9fd81-kube-api-access-vdxz2\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253655 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253688 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253740 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253768 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-config\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253898 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-dns-svc\") on node 
\"crc\" DevicePath \"\"" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.253909 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676e0fa1-794f-4acf-9c92-1405d3fff0f1-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.355723 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.356449 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.356489 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.356515 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-config\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.357494 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-config\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.358055 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.362183 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.362407 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.362446 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdxz2\" (UniqueName: 
\"kubernetes.io/projected/693b90b6-22ae-41d7-8a93-3d854bc9fd81-kube-api-access-vdxz2\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.362805 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.363308 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.386316 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdxz2\" (UniqueName: \"kubernetes.io/projected/693b90b6-22ae-41d7-8a93-3d854bc9fd81-kube-api-access-vdxz2\") pod \"dnsmasq-dns-8b5c85b87-2dzk7\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.494491 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.585493 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-f5jjw"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.615533 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-f5jjw"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.712368 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.713847 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.729679 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.730061 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-nbts5" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.730179 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.734055 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-978c2"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.757208 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.817157 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:03 crc kubenswrapper[4605]: E1001 14:02:03.817744 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance httpd-run kube-api-access-8pm4p logs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-external-api-0" podUID="e08f5d59-4b00-48d5-8c38-6bbdcd91447b" Oct 01 14:02:03 crc kubenswrapper[4605]: E1001 14:02:03.831836 4605 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Oct 01 14:02:03 crc kubenswrapper[4605]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/e43f6d4f-3963-49e1-92a3-ee75fcac6e16/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 01 14:02:03 crc kubenswrapper[4605]: > podSandboxID="d5b1e012b6bc8cc8482c96977c52d6574055e2c3c482f43500fadb588bd7d275" Oct 01 14:02:03 crc kubenswrapper[4605]: E1001 14:02:03.832006 4605 kuberuntime_manager.go:1274] "Unhandled Error" err=< Oct 01 14:02:03 crc kubenswrapper[4605]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66hbbh89h5c9h565h54bh5dch67fh7bh8fh76h5d7h689hdh5ddh59h568h56ch656h647h555hc6h579hcch68bh65h6dhbh677h68bh694h59dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fz8xq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-76fcf4b695-ntvfv_openstack(e43f6d4f-3963-49e1-92a3-ee75fcac6e16): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/e43f6d4f-3963-49e1-92a3-ee75fcac6e16/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 01 14:02:03 crc kubenswrapper[4605]: > logger="UnhandledError" Oct 01 14:02:03 crc kubenswrapper[4605]: E1001 14:02:03.834203 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/e43f6d4f-3963-49e1-92a3-ee75fcac6e16/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" 
podUID="e43f6d4f-3963-49e1-92a3-ee75fcac6e16" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.856166 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5d459df97c-xmd6q"] Oct 01 14:02:03 crc kubenswrapper[4605]: W1001 14:02:03.858163 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod287fa988_b116_4b4d_a02c_990e801124d0.slice/crio-53b88e1cb5dc2a7716904bc2dcf513a17220f39d8228e44d8172bd2e27ac0026 WatchSource:0}: Error finding container 53b88e1cb5dc2a7716904bc2dcf513a17220f39d8228e44d8172bd2e27ac0026: Status 404 returned error can't find the container with id 53b88e1cb5dc2a7716904bc2dcf513a17220f39d8228e44d8172bd2e27ac0026 Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.886207 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.886257 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-scripts\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.886277 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.886299 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-logs\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.886328 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pm4p\" (UniqueName: \"kubernetes.io/projected/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-kube-api-access-8pm4p\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.886351 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.897889 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-config-data\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " 
pod="openstack/glance-default-external-api-0" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.900565 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7997989dbf-ht8bk"] Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.901936 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:03 crc kubenswrapper[4605]: I1001 14:02:03.995617 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="676e0fa1-794f-4acf-9c92-1405d3fff0f1" path="/var/lib/kubelet/pods/676e0fa1-794f-4acf-9c92-1405d3fff0f1/volumes" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.001569 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-scripts\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.001634 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.001694 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4gdt\" (UniqueName: \"kubernetes.io/projected/04aa43c0-e349-48c1-bd68-53fdbbb92639-kube-api-access-c4gdt\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.001722 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-logs\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.001775 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pm4p\" (UniqueName: \"kubernetes.io/projected/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-kube-api-access-8pm4p\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.001810 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-config-data\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.002043 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7997989dbf-ht8bk"] Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.002354 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 
14:02:04.002417 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04aa43c0-e349-48c1-bd68-53fdbbb92639-horizon-secret-key\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.002589 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-config-data\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.002632 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04aa43c0-e349-48c1-bd68-53fdbbb92639-logs\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.002672 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-scripts\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.002692 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.003333 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.008980 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.011778 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-logs\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.013431 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.015658 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " 
pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.016705 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-scripts\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.017369 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-config-data\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.038594 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pm4p\" (UniqueName: \"kubernetes.io/projected/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-kube-api-access-8pm4p\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.066202 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.073850 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.078574 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.084696 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.116942 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-config-data\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.117040 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04aa43c0-e349-48c1-bd68-53fdbbb92639-horizon-secret-key\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.117230 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04aa43c0-e349-48c1-bd68-53fdbbb92639-logs\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.117267 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-scripts\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.117347 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4gdt\" (UniqueName: 
\"kubernetes.io/projected/04aa43c0-e349-48c1-bd68-53fdbbb92639-kube-api-access-c4gdt\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.118271 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-config-data\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.118476 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04aa43c0-e349-48c1-bd68-53fdbbb92639-logs\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.118886 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-scripts\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.130844 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.136433 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4gdt\" (UniqueName: \"kubernetes.io/projected/04aa43c0-e349-48c1-bd68-53fdbbb92639-kube-api-access-c4gdt\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.137350 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04aa43c0-e349-48c1-bd68-53fdbbb92639-horizon-secret-key\") pod \"horizon-7997989dbf-ht8bk\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.182223 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-2dzk7"] Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220019 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220060 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h8gd\" (UniqueName: \"kubernetes.io/projected/77da7acd-19f2-4a61-920d-d341bd5a4598-kube-api-access-9h8gd\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220081 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-config-data\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220726 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-scripts\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220765 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-logs\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220789 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.220812 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.263415 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-978c2" event={"ID":"287fa988-b116-4b4d-a02c-990e801124d0","Type":"ContainerStarted","Data":"53b88e1cb5dc2a7716904bc2dcf513a17220f39d8228e44d8172bd2e27ac0026"} Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.271891 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.273159 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" event={"ID":"693b90b6-22ae-41d7-8a93-3d854bc9fd81","Type":"ContainerStarted","Data":"c20f0c1cf41e68b2081bfac252beb5f88a6fadcc45167a93748781e59ea83056"} Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.277415 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.295756 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327210 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327269 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h8gd\" (UniqueName: \"kubernetes.io/projected/77da7acd-19f2-4a61-920d-d341bd5a4598-kube-api-access-9h8gd\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327292 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-config-data\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327346 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-scripts\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327373 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-logs\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327422 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.327448 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.328571 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.329078 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-logs\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.330013 4605 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.341275 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-config-data\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.348857 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-scripts\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.349552 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.350933 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h8gd\" (UniqueName: \"kubernetes.io/projected/77da7acd-19f2-4a61-920d-d341bd5a4598-kube-api-access-9h8gd\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.392393 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.407661 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432609 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432652 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-combined-ca-bundle\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432702 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-scripts\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432732 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-httpd-run\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432793 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-config-data\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432815 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pm4p\" (UniqueName: \"kubernetes.io/projected/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-kube-api-access-8pm4p\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.432889 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-logs\") pod \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\" (UID: \"e08f5d59-4b00-48d5-8c38-6bbdcd91447b\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.433887 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-logs" (OuterVolumeSpecName: "logs") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.434477 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.441059 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.445484 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-scripts" (OuterVolumeSpecName: "scripts") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.448163 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.453868 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-config-data" (OuterVolumeSpecName: "config-data") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.476611 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-kube-api-access-8pm4p" (OuterVolumeSpecName: "kube-api-access-8pm4p") pod "e08f5d59-4b00-48d5-8c38-6bbdcd91447b" (UID: "e08f5d59-4b00-48d5-8c38-6bbdcd91447b"). InnerVolumeSpecName "kube-api-access-8pm4p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.541928 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.542206 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.542219 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.542230 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.542244 4605 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.542253 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.542261 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pm4p\" (UniqueName: \"kubernetes.io/projected/e08f5d59-4b00-48d5-8c38-6bbdcd91447b-kube-api-access-8pm4p\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.631243 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.644505 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.889373 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.950267 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-nb\") pod \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.950553 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-config\") pod \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.951497 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-sb\") pod \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.953490 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-swift-storage-0\") pod \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.953684 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-svc\") pod \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.953725 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz8xq\" (UniqueName: \"kubernetes.io/projected/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-kube-api-access-fz8xq\") pod \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\" (UID: \"e43f6d4f-3963-49e1-92a3-ee75fcac6e16\") " Oct 01 14:02:04 crc kubenswrapper[4605]: I1001 14:02:04.971504 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-kube-api-access-fz8xq" (OuterVolumeSpecName: "kube-api-access-fz8xq") pod "e43f6d4f-3963-49e1-92a3-ee75fcac6e16" (UID: "e43f6d4f-3963-49e1-92a3-ee75fcac6e16"). InnerVolumeSpecName "kube-api-access-fz8xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.057602 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-config" (OuterVolumeSpecName: "config") pod "e43f6d4f-3963-49e1-92a3-ee75fcac6e16" (UID: "e43f6d4f-3963-49e1-92a3-ee75fcac6e16"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.061545 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz8xq\" (UniqueName: \"kubernetes.io/projected/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-kube-api-access-fz8xq\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.061586 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.056232 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e43f6d4f-3963-49e1-92a3-ee75fcac6e16" (UID: "e43f6d4f-3963-49e1-92a3-ee75fcac6e16"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.062628 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e43f6d4f-3963-49e1-92a3-ee75fcac6e16" (UID: "e43f6d4f-3963-49e1-92a3-ee75fcac6e16"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.062770 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e43f6d4f-3963-49e1-92a3-ee75fcac6e16" (UID: "e43f6d4f-3963-49e1-92a3-ee75fcac6e16"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.099158 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7997989dbf-ht8bk"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.101745 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e43f6d4f-3963-49e1-92a3-ee75fcac6e16" (UID: "e43f6d4f-3963-49e1-92a3-ee75fcac6e16"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:05 crc kubenswrapper[4605]: W1001 14:02:05.104197 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04aa43c0_e349_48c1_bd68_53fdbbb92639.slice/crio-27eb29b1051418a83e193a0e731e6016eca4716da276703ddd2034c5790df752 WatchSource:0}: Error finding container 27eb29b1051418a83e193a0e731e6016eca4716da276703ddd2034c5790df752: Status 404 returned error can't find the container with id 27eb29b1051418a83e193a0e731e6016eca4716da276703ddd2034c5790df752 Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.163598 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.163634 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.163645 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.163654 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e43f6d4f-3963-49e1-92a3-ee75fcac6e16-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.302721 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7997989dbf-ht8bk" event={"ID":"04aa43c0-e349-48c1-bd68-53fdbbb92639","Type":"ContainerStarted","Data":"27eb29b1051418a83e193a0e731e6016eca4716da276703ddd2034c5790df752"} Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.307733 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.307818 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-ntvfv" event={"ID":"e43f6d4f-3963-49e1-92a3-ee75fcac6e16","Type":"ContainerDied","Data":"d5b1e012b6bc8cc8482c96977c52d6574055e2c3c482f43500fadb588bd7d275"} Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.308330 4605 scope.go:117] "RemoveContainer" containerID="3a0e2330fd672d4c6f92c0425625689ac3362c177957531e9e3c3fc1c8a7aef4" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.312890 4605 generic.go:334] "Generic (PLEG): container finished" podID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerID="d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb" exitCode=0 Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.312966 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.315198 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" event={"ID":"693b90b6-22ae-41d7-8a93-3d854bc9fd81","Type":"ContainerDied","Data":"d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb"} Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.439693 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.597761 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-ntvfv"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.613073 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-ntvfv"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.695940 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.703694 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.712393 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:05 crc kubenswrapper[4605]: E1001 14:02:05.712886 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43f6d4f-3963-49e1-92a3-ee75fcac6e16" containerName="init" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.712899 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43f6d4f-3963-49e1-92a3-ee75fcac6e16" containerName="init" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.713141 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43f6d4f-3963-49e1-92a3-ee75fcac6e16" containerName="init" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.714649 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.717817 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.726840 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790117 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrjkr\" (UniqueName: \"kubernetes.io/projected/ab88d000-56c4-4cba-b2e3-efb25f217503-kube-api-access-qrjkr\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790181 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790207 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790346 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790463 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790579 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.790690 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-logs\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892223 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " 
pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892280 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892314 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-logs\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892370 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrjkr\" (UniqueName: \"kubernetes.io/projected/ab88d000-56c4-4cba-b2e3-efb25f217503-kube-api-access-qrjkr\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892389 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892410 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892428 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.892800 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.893408 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.893642 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-logs\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 
14:02:05.919871 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.933210 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.962015 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrjkr\" (UniqueName: \"kubernetes.io/projected/ab88d000-56c4-4cba-b2e3-efb25f217503-kube-api-access-qrjkr\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.962642 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.963272 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.966900 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e08f5d59-4b00-48d5-8c38-6bbdcd91447b" path="/var/lib/kubelet/pods/e08f5d59-4b00-48d5-8c38-6bbdcd91447b/volumes" Oct 01 14:02:05 crc kubenswrapper[4605]: I1001 14:02:05.970861 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e43f6d4f-3963-49e1-92a3-ee75fcac6e16" path="/var/lib/kubelet/pods/e43f6d4f-3963-49e1-92a3-ee75fcac6e16/volumes" Oct 01 14:02:06 crc kubenswrapper[4605]: I1001 14:02:06.062139 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:06 crc kubenswrapper[4605]: I1001 14:02:06.367478 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" event={"ID":"693b90b6-22ae-41d7-8a93-3d854bc9fd81","Type":"ContainerStarted","Data":"6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4"} Oct 01 14:02:06 crc kubenswrapper[4605]: I1001 14:02:06.368059 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:06 crc kubenswrapper[4605]: I1001 14:02:06.369044 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77da7acd-19f2-4a61-920d-d341bd5a4598","Type":"ContainerStarted","Data":"3529b92f4a77ab215542d9aa999a78e8b2f783f90c0341852ada92b8513188e5"} Oct 01 14:02:06 crc kubenswrapper[4605]: I1001 14:02:06.412054 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" podStartSLOduration=4.412029531 podStartE2EDuration="4.412029531s" podCreationTimestamp="2025-10-01 14:02:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:06.405080314 +0000 UTC m=+1049.149056532" watchObservedRunningTime="2025-10-01 14:02:06.412029531 +0000 UTC m=+1049.156005739" Oct 01 14:02:06 crc kubenswrapper[4605]: I1001 14:02:06.889226 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:06 crc kubenswrapper[4605]: W1001 14:02:06.957615 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab88d000_56c4_4cba_b2e3_efb25f217503.slice/crio-47e71b574c66bd35bf96c22320261ed136a92e39c23334a9ff43b465ccda917f WatchSource:0}: Error finding container 47e71b574c66bd35bf96c22320261ed136a92e39c23334a9ff43b465ccda917f: Status 404 returned error can't find the container with id 47e71b574c66bd35bf96c22320261ed136a92e39c23334a9ff43b465ccda917f Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.031538 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-xj5tw"] Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.032891 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.040401 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mxktz" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.045217 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.054093 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xj5tw"] Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.231157 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-combined-ca-bundle\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.231297 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-db-sync-config-data\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.231337 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrznj\" (UniqueName: \"kubernetes.io/projected/e8f994e1-7878-41ab-b619-4946d957e710-kube-api-access-mrznj\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.268277 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-bkj69"] Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.269455 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.273042 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.273087 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5c4mb" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.275734 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.298440 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bkj69"] Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.339290 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrznj\" (UniqueName: \"kubernetes.io/projected/e8f994e1-7878-41ab-b619-4946d957e710-kube-api-access-mrznj\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.339697 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-combined-ca-bundle\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.340394 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-combined-ca-bundle\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.340431 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-db-sync-config-data\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.340450 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-config\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.340475 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bvjd\" (UniqueName: \"kubernetes.io/projected/f17ed625-db31-40c7-9958-f4b89f66ffa0-kube-api-access-9bvjd\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.348387 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-db-sync-config-data\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.351841 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-combined-ca-bundle\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.380201 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrznj\" (UniqueName: \"kubernetes.io/projected/e8f994e1-7878-41ab-b619-4946d957e710-kube-api-access-mrznj\") pod \"barbican-db-sync-xj5tw\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.402082 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab88d000-56c4-4cba-b2e3-efb25f217503","Type":"ContainerStarted","Data":"47e71b574c66bd35bf96c22320261ed136a92e39c23334a9ff43b465ccda917f"} Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.405271 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77da7acd-19f2-4a61-920d-d341bd5a4598","Type":"ContainerStarted","Data":"91bb02d320f138cd86487aa8949cbbcc68cbf3d09324bc4a2556d5a39d44b11f"} Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.441424 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-combined-ca-bundle\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.441754 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-config\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.441782 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bvjd\" (UniqueName: \"kubernetes.io/projected/f17ed625-db31-40c7-9958-f4b89f66ffa0-kube-api-access-9bvjd\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.449371 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-combined-ca-bundle\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.450489 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-config\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.451051 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.461037 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bvjd\" (UniqueName: \"kubernetes.io/projected/f17ed625-db31-40c7-9958-f4b89f66ffa0-kube-api-access-9bvjd\") pod \"neutron-db-sync-bkj69\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:07 crc kubenswrapper[4605]: I1001 14:02:07.596263 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bkj69" Oct 01 14:02:08 crc kubenswrapper[4605]: I1001 14:02:08.454188 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab88d000-56c4-4cba-b2e3-efb25f217503","Type":"ContainerStarted","Data":"adc05c7cfe78773415b1f63f63cbc496daf588464d17aa7271edbd1f5222cf42"} Oct 01 14:02:08 crc kubenswrapper[4605]: I1001 14:02:08.457549 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77da7acd-19f2-4a61-920d-d341bd5a4598","Type":"ContainerStarted","Data":"6a329f5fe313bbe6ee29f4bcaa54a5e91d6df26eb5e466edea66faede6e30ddb"} Oct 01 14:02:08 crc kubenswrapper[4605]: I1001 14:02:08.492328 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.492307643 podStartE2EDuration="4.492307643s" podCreationTimestamp="2025-10-01 14:02:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:08.485731647 +0000 UTC m=+1051.229707855" watchObservedRunningTime="2025-10-01 14:02:08.492307643 +0000 UTC m=+1051.236283851" Oct 01 14:02:08 crc kubenswrapper[4605]: I1001 14:02:08.540492 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xj5tw"] Oct 01 14:02:08 crc kubenswrapper[4605]: W1001 14:02:08.546392 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8f994e1_7878_41ab_b619_4946d957e710.slice/crio-ea75477a7a647525ad692b9bb62d5eb9017e34db832e284c2dd8c8cd8f532ccc WatchSource:0}: Error finding container ea75477a7a647525ad692b9bb62d5eb9017e34db832e284c2dd8c8cd8f532ccc: Status 404 returned error can't find the container with id ea75477a7a647525ad692b9bb62d5eb9017e34db832e284c2dd8c8cd8f532ccc Oct 01 14:02:08 crc kubenswrapper[4605]: I1001 14:02:08.720851 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bkj69"] Oct 01 14:02:09 crc kubenswrapper[4605]: I1001 14:02:09.473043 4605 generic.go:334] "Generic (PLEG): container finished" podID="0aafc049-c1db-4e02-a3fa-07274790c6f6" containerID="57cf237c4829da4dbf12faad24a5e9bf332462eb8be5ae8c2d20f53919726afa" exitCode=0 Oct 01 14:02:09 crc kubenswrapper[4605]: I1001 14:02:09.473149 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qt7p" event={"ID":"0aafc049-c1db-4e02-a3fa-07274790c6f6","Type":"ContainerDied","Data":"57cf237c4829da4dbf12faad24a5e9bf332462eb8be5ae8c2d20f53919726afa"} Oct 01 14:02:09 crc kubenswrapper[4605]: I1001 14:02:09.478697 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"ab88d000-56c4-4cba-b2e3-efb25f217503","Type":"ContainerStarted","Data":"984b301f47a273f20907e1126f16f318a05f5bbd8ac04a26a059602d9054e314"} Oct 01 14:02:09 crc kubenswrapper[4605]: I1001 14:02:09.481084 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xj5tw" event={"ID":"e8f994e1-7878-41ab-b619-4946d957e710","Type":"ContainerStarted","Data":"ea75477a7a647525ad692b9bb62d5eb9017e34db832e284c2dd8c8cd8f532ccc"} Oct 01 14:02:10 crc kubenswrapper[4605]: I1001 14:02:10.528310 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.528289791 podStartE2EDuration="5.528289791s" podCreationTimestamp="2025-10-01 14:02:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:10.52349009 +0000 UTC m=+1053.267466298" watchObservedRunningTime="2025-10-01 14:02:10.528289791 +0000 UTC m=+1053.272265999" Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.152352 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.153803 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-log" containerID="cri-o://adc05c7cfe78773415b1f63f63cbc496daf588464d17aa7271edbd1f5222cf42" gracePeriod=30 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.154503 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-httpd" containerID="cri-o://984b301f47a273f20907e1126f16f318a05f5bbd8ac04a26a059602d9054e314" gracePeriod=30 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.228639 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.228902 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-log" containerID="cri-o://91bb02d320f138cd86487aa8949cbbcc68cbf3d09324bc4a2556d5a39d44b11f" gracePeriod=30 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.229179 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-httpd" containerID="cri-o://6a329f5fe313bbe6ee29f4bcaa54a5e91d6df26eb5e466edea66faede6e30ddb" gracePeriod=30 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.571080 4605 generic.go:334] "Generic (PLEG): container finished" podID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerID="6a329f5fe313bbe6ee29f4bcaa54a5e91d6df26eb5e466edea66faede6e30ddb" exitCode=0 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.571143 4605 generic.go:334] "Generic (PLEG): container finished" podID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerID="91bb02d320f138cd86487aa8949cbbcc68cbf3d09324bc4a2556d5a39d44b11f" exitCode=143 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.571222 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"77da7acd-19f2-4a61-920d-d341bd5a4598","Type":"ContainerDied","Data":"6a329f5fe313bbe6ee29f4bcaa54a5e91d6df26eb5e466edea66faede6e30ddb"} Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.571249 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77da7acd-19f2-4a61-920d-d341bd5a4598","Type":"ContainerDied","Data":"91bb02d320f138cd86487aa8949cbbcc68cbf3d09324bc4a2556d5a39d44b11f"} Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.594940 4605 generic.go:334] "Generic (PLEG): container finished" podID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerID="984b301f47a273f20907e1126f16f318a05f5bbd8ac04a26a059602d9054e314" exitCode=0 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.594984 4605 generic.go:334] "Generic (PLEG): container finished" podID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerID="adc05c7cfe78773415b1f63f63cbc496daf588464d17aa7271edbd1f5222cf42" exitCode=143 Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.595005 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab88d000-56c4-4cba-b2e3-efb25f217503","Type":"ContainerDied","Data":"984b301f47a273f20907e1126f16f318a05f5bbd8ac04a26a059602d9054e314"} Oct 01 14:02:12 crc kubenswrapper[4605]: I1001 14:02:12.595031 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab88d000-56c4-4cba-b2e3-efb25f217503","Type":"ContainerDied","Data":"adc05c7cfe78773415b1f63f63cbc496daf588464d17aa7271edbd1f5222cf42"} Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.111046 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f76447ddf-ghwkf"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.170449 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-749477d64b-5dpnm"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.183990 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.188174 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.199070 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-749477d64b-5dpnm"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.245638 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7997989dbf-ht8bk"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.287197 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64d6df575b-5ctbf"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.294793 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.304745 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64d6df575b-5ctbf"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.316971 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-combined-ca-bundle\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.317059 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-config-data\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.317093 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-logs\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.317146 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwpd2\" (UniqueName: \"kubernetes.io/projected/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-kube-api-access-wwpd2\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.317178 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-secret-key\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.317197 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-tls-certs\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.317221 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-scripts\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419268 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzd79\" (UniqueName: \"kubernetes.io/projected/80fb1c51-bd86-4896-8dac-59747473f066-kube-api-access-vzd79\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419365 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80fb1c51-bd86-4896-8dac-59747473f066-logs\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419408 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-combined-ca-bundle\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419451 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-config-data\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419488 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-horizon-secret-key\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419512 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-logs\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419569 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80fb1c51-bd86-4896-8dac-59747473f066-scripts\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419598 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwpd2\" (UniqueName: \"kubernetes.io/projected/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-kube-api-access-wwpd2\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419647 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-horizon-tls-certs\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419680 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/80fb1c51-bd86-4896-8dac-59747473f066-config-data\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419713 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-combined-ca-bundle\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419739 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-secret-key\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419766 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-tls-certs\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.419802 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-scripts\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.420271 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-logs\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.420758 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-scripts\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.420880 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-config-data\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.429925 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-combined-ca-bundle\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.430435 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-tls-certs\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.440253 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-secret-key\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " 
pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.449137 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwpd2\" (UniqueName: \"kubernetes.io/projected/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-kube-api-access-wwpd2\") pod \"horizon-749477d64b-5dpnm\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.497294 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.521969 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-combined-ca-bundle\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522087 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzd79\" (UniqueName: \"kubernetes.io/projected/80fb1c51-bd86-4896-8dac-59747473f066-kube-api-access-vzd79\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522140 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80fb1c51-bd86-4896-8dac-59747473f066-logs\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522181 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-horizon-secret-key\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522217 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80fb1c51-bd86-4896-8dac-59747473f066-scripts\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522240 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/80fb1c51-bd86-4896-8dac-59747473f066-config-data\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522258 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-horizon-tls-certs\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.522851 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80fb1c51-bd86-4896-8dac-59747473f066-logs\") pod \"horizon-64d6df575b-5ctbf\" (UID: 
\"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.523743 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80fb1c51-bd86-4896-8dac-59747473f066-scripts\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.524803 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/80fb1c51-bd86-4896-8dac-59747473f066-config-data\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.526251 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-horizon-secret-key\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.526642 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.526723 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-horizon-tls-certs\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.536827 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80fb1c51-bd86-4896-8dac-59747473f066-combined-ca-bundle\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.564203 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzd79\" (UniqueName: \"kubernetes.io/projected/80fb1c51-bd86-4896-8dac-59747473f066-kube-api-access-vzd79\") pod \"horizon-64d6df575b-5ctbf\" (UID: \"80fb1c51-bd86-4896-8dac-59747473f066\") " pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.582542 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-gxlcx"] Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.583805 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns" containerID="cri-o://3511825df56b94b1ff17ea8a204fc8b39957674d6f9b10331cbcbacaa129dfb9" gracePeriod=10 Oct 01 14:02:13 crc kubenswrapper[4605]: I1001 14:02:13.620412 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:14 crc kubenswrapper[4605]: I1001 14:02:14.617903 4605 generic.go:334] "Generic (PLEG): container finished" podID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerID="3511825df56b94b1ff17ea8a204fc8b39957674d6f9b10331cbcbacaa129dfb9" exitCode=0 Oct 01 14:02:14 crc kubenswrapper[4605]: I1001 14:02:14.618324 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" event={"ID":"bd54105c-5b1e-4706-9fe8-83cdca837998","Type":"ContainerDied","Data":"3511825df56b94b1ff17ea8a204fc8b39957674d6f9b10331cbcbacaa129dfb9"} Oct 01 14:02:18 crc kubenswrapper[4605]: I1001 14:02:18.123769 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: connect: connection refused" Oct 01 14:02:21 crc kubenswrapper[4605]: E1001 14:02:21.100596 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Oct 01 14:02:21 crc kubenswrapper[4605]: E1001 14:02:21.101256 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mkrtd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-px7rk_openstack(a3bd053b-19f8-4908-b8d5-e5c0ae5599c0): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:02:21 crc kubenswrapper[4605]: E1001 14:02:21.102434 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-px7rk" podUID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.145642 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.276992 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-fernet-keys\") pod \"0aafc049-c1db-4e02-a3fa-07274790c6f6\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.277084 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-combined-ca-bundle\") pod \"0aafc049-c1db-4e02-a3fa-07274790c6f6\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.277141 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-config-data\") pod \"0aafc049-c1db-4e02-a3fa-07274790c6f6\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.277183 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w97m4\" (UniqueName: \"kubernetes.io/projected/0aafc049-c1db-4e02-a3fa-07274790c6f6-kube-api-access-w97m4\") pod \"0aafc049-c1db-4e02-a3fa-07274790c6f6\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.277221 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-scripts\") pod \"0aafc049-c1db-4e02-a3fa-07274790c6f6\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.277311 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-credential-keys\") pod \"0aafc049-c1db-4e02-a3fa-07274790c6f6\" (UID: \"0aafc049-c1db-4e02-a3fa-07274790c6f6\") " Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.285804 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0aafc049-c1db-4e02-a3fa-07274790c6f6" (UID: "0aafc049-c1db-4e02-a3fa-07274790c6f6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.285873 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-scripts" (OuterVolumeSpecName: "scripts") pod "0aafc049-c1db-4e02-a3fa-07274790c6f6" (UID: "0aafc049-c1db-4e02-a3fa-07274790c6f6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.286232 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0aafc049-c1db-4e02-a3fa-07274790c6f6" (UID: "0aafc049-c1db-4e02-a3fa-07274790c6f6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.298996 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aafc049-c1db-4e02-a3fa-07274790c6f6-kube-api-access-w97m4" (OuterVolumeSpecName: "kube-api-access-w97m4") pod "0aafc049-c1db-4e02-a3fa-07274790c6f6" (UID: "0aafc049-c1db-4e02-a3fa-07274790c6f6"). InnerVolumeSpecName "kube-api-access-w97m4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.310162 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-config-data" (OuterVolumeSpecName: "config-data") pod "0aafc049-c1db-4e02-a3fa-07274790c6f6" (UID: "0aafc049-c1db-4e02-a3fa-07274790c6f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.317541 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0aafc049-c1db-4e02-a3fa-07274790c6f6" (UID: "0aafc049-c1db-4e02-a3fa-07274790c6f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.379133 4605 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.379173 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.379191 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.379202 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w97m4\" (UniqueName: \"kubernetes.io/projected/0aafc049-c1db-4e02-a3fa-07274790c6f6-kube-api-access-w97m4\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.379215 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.379226 4605 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0aafc049-c1db-4e02-a3fa-07274790c6f6-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:21 crc kubenswrapper[4605]: W1001 14:02:21.658305 4605 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf17ed625_db31_40c7_9958_f4b89f66ffa0.slice/crio-83e880bcf317d527091672e01a35577addd00f2cd9712104cddd065490b937e5 WatchSource:0}: Error finding container 83e880bcf317d527091672e01a35577addd00f2cd9712104cddd065490b937e5: Status 404 returned error can't find the container with id 83e880bcf317d527091672e01a35577addd00f2cd9712104cddd065490b937e5 Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.678770 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bkj69" event={"ID":"f17ed625-db31-40c7-9958-f4b89f66ffa0","Type":"ContainerStarted","Data":"83e880bcf317d527091672e01a35577addd00f2cd9712104cddd065490b937e5"} Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.680364 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5qt7p" Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.680411 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qt7p" event={"ID":"0aafc049-c1db-4e02-a3fa-07274790c6f6","Type":"ContainerDied","Data":"ceb9391974780989995344ac4170c9fff2afb9899976b6a96d1423e77d45fdaf"} Oct 01 14:02:21 crc kubenswrapper[4605]: I1001 14:02:21.680475 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ceb9391974780989995344ac4170c9fff2afb9899976b6a96d1423e77d45fdaf" Oct 01 14:02:21 crc kubenswrapper[4605]: E1001 14:02:21.681778 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-px7rk" podUID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" Oct 01 14:02:21 crc kubenswrapper[4605]: E1001 14:02:21.740123 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Oct 01 14:02:21 crc kubenswrapper[4605]: E1001 14:02:21.740278 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n598h5ffh56h9dh65fhb8h56fh575h588h5c7h5cbh65ch698h86h566hdch567h5bh6h5dch597h5d8h65fh75h544h694h4h546h56chf5h99h5c8q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fzsqw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(df36773f-c59f-4abb-9adf-20dee81012ae): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.238473 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5qt7p"] Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.245496 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5qt7p"] Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.317687 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kdhkw"] Oct 01 14:02:22 crc kubenswrapper[4605]: E1001 14:02:22.318010 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aafc049-c1db-4e02-a3fa-07274790c6f6" containerName="keystone-bootstrap" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.322264 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aafc049-c1db-4e02-a3fa-07274790c6f6" containerName="keystone-bootstrap" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.322519 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aafc049-c1db-4e02-a3fa-07274790c6f6" containerName="keystone-bootstrap" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.323055 4605 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.324700 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7vm5" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.325076 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.326165 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.326263 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.328237 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kdhkw"] Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.406396 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-fernet-keys\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.406448 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5krx\" (UniqueName: \"kubernetes.io/projected/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-kube-api-access-n5krx\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.406493 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-scripts\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.406556 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-config-data\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.406586 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-combined-ca-bundle\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.406653 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-credential-keys\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.508433 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-credential-keys\") pod 
\"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.508520 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-fernet-keys\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.508543 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5krx\" (UniqueName: \"kubernetes.io/projected/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-kube-api-access-n5krx\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.508589 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-scripts\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.508663 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-config-data\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.508704 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-combined-ca-bundle\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.521694 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-combined-ca-bundle\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.522831 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-credential-keys\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.523139 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-fernet-keys\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.523629 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-config-data\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.523930 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-scripts\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.527444 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5krx\" (UniqueName: \"kubernetes.io/projected/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-kube-api-access-n5krx\") pod \"keystone-bootstrap-kdhkw\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:22 crc kubenswrapper[4605]: I1001 14:02:22.683238 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:23 crc kubenswrapper[4605]: I1001 14:02:23.123162 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: connect: connection refused" Oct 01 14:02:23 crc kubenswrapper[4605]: I1001 14:02:23.935641 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aafc049-c1db-4e02-a3fa-07274790c6f6" path="/var/lib/kubelet/pods/0aafc049-c1db-4e02-a3fa-07274790c6f6/volumes" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.382414 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.382928 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8fh96h5bbh5bch668h5h598h548h5bh675h5dchdchb9h9bh9bh5fbh9h55dh54fh5f5hd7h5fdh576h68dh587h564h5d8hd9h9bh5d9hc8hc4q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n8nk8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5d459df97c-xmd6q_openstack(2cc1af3d-7d60-4b25-9b13-d563d3f0e31b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.384997 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5d459df97c-xmd6q" podUID="2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.399402 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.399583 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n84h5b9hd6hfch55dh675h666h599hf8h85h59h568h88hbh575h5fch5c9h574hb9h648h57fh598h66hd8h5f7h67dh59ch88h677h87h687h59fq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c4gdt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7997989dbf-ht8bk_openstack(04aa43c0-e349-48c1-bd68-53fdbbb92639): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.401749 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7997989dbf-ht8bk" podUID="04aa43c0-e349-48c1-bd68-53fdbbb92639" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.407363 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.407528 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n654h5c9h599h65chf9h5cfh687hc7h6fh597h68dhb5h686hdh9bh645h5b4hc7hffh68hfdh5cbh59h5d6h5bdh9ch6bh659h5c6h5dh56ch9cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nlgtj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-f76447ddf-ghwkf_openstack(f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:02:26 crc kubenswrapper[4605]: E1001 14:02:26.410137 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-f76447ddf-ghwkf" podUID="f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" Oct 01 14:02:33 crc kubenswrapper[4605]: I1001 14:02:33.124414 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: i/o timeout" Oct 01 14:02:33 crc kubenswrapper[4605]: I1001 14:02:33.125274 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:02:34 crc kubenswrapper[4605]: I1001 14:02:34.408368 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:34 crc kubenswrapper[4605]: I1001 14:02:34.408720 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:36 crc kubenswrapper[4605]: I1001 14:02:36.063063 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 14:02:36 crc kubenswrapper[4605]: I1001 14:02:36.063376 4605 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.505057 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.575418 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04aa43c0-e349-48c1-bd68-53fdbbb92639-logs\") pod \"04aa43c0-e349-48c1-bd68-53fdbbb92639\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.575517 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4gdt\" (UniqueName: \"kubernetes.io/projected/04aa43c0-e349-48c1-bd68-53fdbbb92639-kube-api-access-c4gdt\") pod \"04aa43c0-e349-48c1-bd68-53fdbbb92639\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.575907 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04aa43c0-e349-48c1-bd68-53fdbbb92639-logs" (OuterVolumeSpecName: "logs") pod "04aa43c0-e349-48c1-bd68-53fdbbb92639" (UID: "04aa43c0-e349-48c1-bd68-53fdbbb92639"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.576480 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-scripts\") pod \"04aa43c0-e349-48c1-bd68-53fdbbb92639\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.576585 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-config-data\") pod \"04aa43c0-e349-48c1-bd68-53fdbbb92639\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.576619 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04aa43c0-e349-48c1-bd68-53fdbbb92639-horizon-secret-key\") pod \"04aa43c0-e349-48c1-bd68-53fdbbb92639\" (UID: \"04aa43c0-e349-48c1-bd68-53fdbbb92639\") " Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.576979 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04aa43c0-e349-48c1-bd68-53fdbbb92639-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.577026 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-scripts" (OuterVolumeSpecName: "scripts") pod "04aa43c0-e349-48c1-bd68-53fdbbb92639" (UID: "04aa43c0-e349-48c1-bd68-53fdbbb92639"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.577398 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-config-data" (OuterVolumeSpecName: "config-data") pod "04aa43c0-e349-48c1-bd68-53fdbbb92639" (UID: "04aa43c0-e349-48c1-bd68-53fdbbb92639"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.582310 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04aa43c0-e349-48c1-bd68-53fdbbb92639-kube-api-access-c4gdt" (OuterVolumeSpecName: "kube-api-access-c4gdt") pod "04aa43c0-e349-48c1-bd68-53fdbbb92639" (UID: "04aa43c0-e349-48c1-bd68-53fdbbb92639"). InnerVolumeSpecName "kube-api-access-c4gdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.582402 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04aa43c0-e349-48c1-bd68-53fdbbb92639-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "04aa43c0-e349-48c1-bd68-53fdbbb92639" (UID: "04aa43c0-e349-48c1-bd68-53fdbbb92639"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.678542 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.678578 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04aa43c0-e349-48c1-bd68-53fdbbb92639-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.678590 4605 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04aa43c0-e349-48c1-bd68-53fdbbb92639-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.678601 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4gdt\" (UniqueName: \"kubernetes.io/projected/04aa43c0-e349-48c1-bd68-53fdbbb92639-kube-api-access-c4gdt\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.814971 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7997989dbf-ht8bk" event={"ID":"04aa43c0-e349-48c1-bd68-53fdbbb92639","Type":"ContainerDied","Data":"27eb29b1051418a83e193a0e731e6016eca4716da276703ddd2034c5790df752"} Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.815024 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7997989dbf-ht8bk" Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.886038 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7997989dbf-ht8bk"] Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.892594 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7997989dbf-ht8bk"] Oct 01 14:02:37 crc kubenswrapper[4605]: I1001 14:02:37.938240 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04aa43c0-e349-48c1-bd68-53fdbbb92639" path="/var/lib/kubelet/pods/04aa43c0-e349-48c1-bd68-53fdbbb92639/volumes" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.125736 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: i/o timeout" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.636113 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:d9d298ba6954298e99244a71cad0e9c49e72b091966658cd5447165327ba4b55: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-barbican-api/blobs/sha256:d9d298ba6954298e99244a71cad0e9c49e72b091966658cd5447165327ba4b55\": context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.636288 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mrznj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-xj5tw_openstack(e8f994e1-7878-41ab-b619-4946d957e710): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:d9d298ba6954298e99244a71cad0e9c49e72b091966658cd5447165327ba4b55: Get 
\"https://quay.io/v2/podified-antelope-centos9/openstack-barbican-api/blobs/sha256:d9d298ba6954298e99244a71cad0e9c49e72b091966658cd5447165327ba4b55\": context canceled" logger="UnhandledError" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.637447 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:d9d298ba6954298e99244a71cad0e9c49e72b091966658cd5447165327ba4b55: Get \\\"https://quay.io/v2/podified-antelope-centos9/openstack-barbican-api/blobs/sha256:d9d298ba6954298e99244a71cad0e9c49e72b091966658cd5447165327ba4b55\\\": context canceled\"" pod="openstack/barbican-db-sync-xj5tw" podUID="e8f994e1-7878-41ab-b619-4946d957e710" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.666587 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.666744 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bvfhk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod cinder-db-sync-978c2_openstack(287fa988-b116-4b4d-a02c-990e801124d0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.667851 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-978c2" podUID="287fa988-b116-4b4d-a02c-990e801124d0" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.801102 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.802328 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.802769 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.828430 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.860045 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.875673 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"77da7acd-19f2-4a61-920d-d341bd5a4598","Type":"ContainerDied","Data":"3529b92f4a77ab215542d9aa999a78e8b2f783f90c0341852ada92b8513188e5"} Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.875789 4605 scope.go:117] "RemoveContainer" containerID="6a329f5fe313bbe6ee29f4bcaa54a5e91d6df26eb5e466edea66faede6e30ddb" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.876022 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.882159 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.882189 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" event={"ID":"bd54105c-5b1e-4706-9fe8-83cdca837998","Type":"ContainerDied","Data":"635539bf9bc86d34f11d4bad9dcb4a13fddc776c030942d1ffcfae06435a8112"} Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.892303 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5d459df97c-xmd6q" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.892768 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d459df97c-xmd6q" event={"ID":"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b","Type":"ContainerDied","Data":"32b5d36334950886671658fbd63dbbdfd392b4e86fa1c816a2e130481cc76a7d"} Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.902151 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f76447ddf-ghwkf" event={"ID":"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c","Type":"ContainerDied","Data":"d57d4ebd5550bcd6db311f9d594ec3fd59d181a486b4d6115bc15ebdd83c62b0"} Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.902255 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f76447ddf-ghwkf" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907392 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-horizon-secret-key\") pod \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907425 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"77da7acd-19f2-4a61-920d-d341bd5a4598\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907464 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-scripts\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907484 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-logs\") pod \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907507 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-config-data\") pod \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907556 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8nk8\" (UniqueName: \"kubernetes.io/projected/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-kube-api-access-n8nk8\") pod \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907573 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-logs\") pod \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907591 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-logs\") pod \"77da7acd-19f2-4a61-920d-d341bd5a4598\" (UID: 
\"77da7acd-19f2-4a61-920d-d341bd5a4598\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907615 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdm8f\" (UniqueName: \"kubernetes.io/projected/bd54105c-5b1e-4706-9fe8-83cdca837998-kube-api-access-xdm8f\") pod \"bd54105c-5b1e-4706-9fe8-83cdca837998\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907633 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-config-data\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907659 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-scripts\") pod \"77da7acd-19f2-4a61-920d-d341bd5a4598\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907691 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-combined-ca-bundle\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907715 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlgtj\" (UniqueName: \"kubernetes.io/projected/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-kube-api-access-nlgtj\") pod \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907740 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-logs\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907773 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-swift-storage-0\") pod \"bd54105c-5b1e-4706-9fe8-83cdca837998\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907790 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrjkr\" (UniqueName: \"kubernetes.io/projected/ab88d000-56c4-4cba-b2e3-efb25f217503-kube-api-access-qrjkr\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907809 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-combined-ca-bundle\") pod \"77da7acd-19f2-4a61-920d-d341bd5a4598\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907823 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-nb\") pod \"bd54105c-5b1e-4706-9fe8-83cdca837998\" 
(UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907861 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-config-data\") pod \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907886 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h8gd\" (UniqueName: \"kubernetes.io/projected/77da7acd-19f2-4a61-920d-d341bd5a4598-kube-api-access-9h8gd\") pod \"77da7acd-19f2-4a61-920d-d341bd5a4598\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907906 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-config\") pod \"bd54105c-5b1e-4706-9fe8-83cdca837998\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907923 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-sb\") pod \"bd54105c-5b1e-4706-9fe8-83cdca837998\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907937 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-httpd-run\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907949 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ab88d000-56c4-4cba-b2e3-efb25f217503\" (UID: \"ab88d000-56c4-4cba-b2e3-efb25f217503\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.907971 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-httpd-run\") pod \"77da7acd-19f2-4a61-920d-d341bd5a4598\" (UID: \"77da7acd-19f2-4a61-920d-d341bd5a4598\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.908008 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-scripts\") pod \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.908029 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-horizon-secret-key\") pod \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\" (UID: \"2cc1af3d-7d60-4b25-9b13-d563d3f0e31b\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.908050 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-svc\") pod \"bd54105c-5b1e-4706-9fe8-83cdca837998\" (UID: \"bd54105c-5b1e-4706-9fe8-83cdca837998\") " Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.908530 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-scripts\") pod \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\" (UID: \"f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c\") "
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.909880 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-scripts" (OuterVolumeSpecName: "scripts") pod "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" (UID: "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.910687 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.910853 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab88d000-56c4-4cba-b2e3-efb25f217503","Type":"ContainerDied","Data":"47e71b574c66bd35bf96c22320261ed136a92e39c23334a9ff43b465ccda917f"}
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.911615 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-logs" (OuterVolumeSpecName: "logs") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.912238 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-logs" (OuterVolumeSpecName: "logs") pod "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" (UID: "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.912526 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-logs" (OuterVolumeSpecName: "logs") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.913492 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-logs" (OuterVolumeSpecName: "logs") pod "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" (UID: "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.920045 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-xj5tw" podUID="e8f994e1-7878-41ab-b619-4946d957e710" Oct 01 14:02:38 crc kubenswrapper[4605]: E1001 14:02:38.920297 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-978c2" podUID="287fa988-b116-4b4d-a02c-990e801124d0" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.922238 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-config-data" (OuterVolumeSpecName: "config-data") pod "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" (UID: "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.928005 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-scripts" (OuterVolumeSpecName: "scripts") pod "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" (UID: "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.928336 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.928863 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.931069 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-config-data" (OuterVolumeSpecName: "config-data") pod "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" (UID: "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.932952 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77da7acd-19f2-4a61-920d-d341bd5a4598-kube-api-access-9h8gd" (OuterVolumeSpecName: "kube-api-access-9h8gd") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "kube-api-access-9h8gd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.933568 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.936568 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" (UID: "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.946349 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.950115 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-scripts" (OuterVolumeSpecName: "scripts") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.950432 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-kube-api-access-n8nk8" (OuterVolumeSpecName: "kube-api-access-n8nk8") pod "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" (UID: "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b"). InnerVolumeSpecName "kube-api-access-n8nk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.952814 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-scripts" (OuterVolumeSpecName: "scripts") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.953013 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd54105c-5b1e-4706-9fe8-83cdca837998-kube-api-access-xdm8f" (OuterVolumeSpecName: "kube-api-access-xdm8f") pod "bd54105c-5b1e-4706-9fe8-83cdca837998" (UID: "bd54105c-5b1e-4706-9fe8-83cdca837998"). InnerVolumeSpecName "kube-api-access-xdm8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.964135 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-kube-api-access-nlgtj" (OuterVolumeSpecName: "kube-api-access-nlgtj") pod "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" (UID: "f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c"). InnerVolumeSpecName "kube-api-access-nlgtj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.973391 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab88d000-56c4-4cba-b2e3-efb25f217503-kube-api-access-qrjkr" (OuterVolumeSpecName: "kube-api-access-qrjkr") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "kube-api-access-qrjkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:38 crc kubenswrapper[4605]: I1001 14:02:38.973506 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" (UID: "2cc1af3d-7d60-4b25-9b13-d563d3f0e31b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010880 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010904 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h8gd\" (UniqueName: \"kubernetes.io/projected/77da7acd-19f2-4a61-920d-d341bd5a4598-kube-api-access-9h8gd\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010922 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010931 4605 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010939 4605 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010950 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010959 4605 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010968 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010975 4605 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010988 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.010996 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011004 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011012 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011021 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8nk8\" (UniqueName: \"kubernetes.io/projected/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b-kube-api-access-n8nk8\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011028 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011036 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77da7acd-19f2-4a61-920d-d341bd5a4598-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011044 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdm8f\" (UniqueName: \"kubernetes.io/projected/bd54105c-5b1e-4706-9fe8-83cdca837998-kube-api-access-xdm8f\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011051 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011059 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlgtj\" (UniqueName: \"kubernetes.io/projected/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c-kube-api-access-nlgtj\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011067 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab88d000-56c4-4cba-b2e3-efb25f217503-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.011075 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrjkr\" (UniqueName: \"kubernetes.io/projected/ab88d000-56c4-4cba-b2e3-efb25f217503-kube-api-access-qrjkr\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.026725 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.035889 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-config" (OuterVolumeSpecName: "config") pod "bd54105c-5b1e-4706-9fe8-83cdca837998" (UID: "bd54105c-5b1e-4706-9fe8-83cdca837998"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.056158 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.059209 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.060589 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bd54105c-5b1e-4706-9fe8-83cdca837998" (UID: "bd54105c-5b1e-4706-9fe8-83cdca837998"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.064269 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.085501 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-config-data" (OuterVolumeSpecName: "config-data") pod "77da7acd-19f2-4a61-920d-d341bd5a4598" (UID: "77da7acd-19f2-4a61-920d-d341bd5a4598"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.088608 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-config-data" (OuterVolumeSpecName: "config-data") pod "ab88d000-56c4-4cba-b2e3-efb25f217503" (UID: "ab88d000-56c4-4cba-b2e3-efb25f217503"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.101334 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bd54105c-5b1e-4706-9fe8-83cdca837998" (UID: "bd54105c-5b1e-4706-9fe8-83cdca837998"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.104483 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bd54105c-5b1e-4706-9fe8-83cdca837998" (UID: "bd54105c-5b1e-4706-9fe8-83cdca837998"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.106202 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bd54105c-5b1e-4706-9fe8-83cdca837998" (UID: "bd54105c-5b1e-4706-9fe8-83cdca837998"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112399 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112526 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112598 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112686 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112792 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112866 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112933 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.112996 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab88d000-56c4-4cba-b2e3-efb25f217503-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.113060 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.113170 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77da7acd-19f2-4a61-920d-d341bd5a4598-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.113246 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd54105c-5b1e-4706-9fe8-83cdca837998-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.158667 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64d6df575b-5ctbf"] Oct 01 14:02:39 crc kubenswrapper[4605]: 
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.314424 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328315 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.328659 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328674 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns"
Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.328689 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-httpd"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328695 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-httpd"
Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.328709 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="init"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328715 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="init"
Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.328728 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-httpd"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328734 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-httpd"
Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.328743 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-log"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328749 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-log"
Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.328758 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-log"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328764 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-log"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328926 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328941 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-log"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328954 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" containerName="glance-httpd"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328964 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-httpd"
containerName="glance-httpd" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.328974 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" containerName="glance-log" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.329853 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.333809 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.333984 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.334181 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-nbts5" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.334300 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.349007 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.364230 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-gxlcx"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.391836 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-gxlcx"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.405609 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5d459df97c-xmd6q"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.415510 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5d459df97c-xmd6q"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.426490 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f76447ddf-ghwkf"] Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.427407 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified" Oct 01 14:02:39 crc kubenswrapper[4605]: E1001 14:02:39.427593 4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-notification-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.432533 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-f76447ddf-ghwkf"]
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443087 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443142 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443185 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0"
\"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443220 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443283 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcmkf\" (UniqueName: \"kubernetes.io/projected/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-kube-api-access-wcmkf\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443363 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443416 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.443442 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.450423 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.457044 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:39 crc kubenswrapper[4605]: W1001 14:02:39.467895 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80fb1c51_bd86_4896_8dac_59747473f066.slice/crio-0b145cd8afc897d045e352e5f5f19e9ff8e5923157ba15737c0e22a5ce2b1798 WatchSource:0}: Error finding container 0b145cd8afc897d045e352e5f5f19e9ff8e5923157ba15737c0e22a5ce2b1798: Status 404 returned error can't find the container with id 0b145cd8afc897d045e352e5f5f19e9ff8e5923157ba15737c0e22a5ce2b1798 Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.477469 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.480047 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.481124 4605 scope.go:117] "RemoveContainer" containerID="91bb02d320f138cd86487aa8949cbbcc68cbf3d09324bc4a2556d5a39d44b11f"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.483681 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.485042 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.489535 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.529319 4605 scope.go:117] "RemoveContainer" containerID="3511825df56b94b1ff17ea8a204fc8b39957674d6f9b10331cbcbacaa129dfb9"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.544882 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcmkf\" (UniqueName: \"kubernetes.io/projected/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-kube-api-access-wcmkf\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.544925 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-scripts\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.544947 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.544975 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.544999 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-config-data\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545026 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545042 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0"
\"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545068 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545120 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwhch\" (UniqueName: \"kubernetes.io/projected/46085e14-7d7e-490a-8078-d2a40a4f3498-kube-api-access-kwhch\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545144 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545159 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545181 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545204 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545224 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545239 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545274 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-logs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.545737 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.546562 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.552988 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.568716 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.568819 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.569158 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.571575 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcmkf\" (UniqueName: \"kubernetes.io/projected/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-kube-api-access-wcmkf\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.572730 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.577384 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") " pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.592269 4605 scope.go:117] "RemoveContainer" containerID="aa2f75a0327a9c1e41e6834536f7d410fc04a8a2f2efae406b5eac1f46917f16" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648076 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-config-data\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648140 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648193 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwhch\" (UniqueName: \"kubernetes.io/projected/46085e14-7d7e-490a-8078-d2a40a4f3498-kube-api-access-kwhch\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648222 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648248 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648307 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-logs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648351 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-scripts\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648394 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.648773 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.650373 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.653490 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.654026 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-logs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.683233 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.683397 4605 scope.go:117] "RemoveContainer" containerID="984b301f47a273f20907e1126f16f318a05f5bbd8ac04a26a059602d9054e314" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.684270 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.687938 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwhch\" (UniqueName: \"kubernetes.io/projected/46085e14-7d7e-490a-8078-d2a40a4f3498-kube-api-access-kwhch\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.694370 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-scripts\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.697205 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-config-data\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.730304 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") " 
pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.823464 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.869325 4605 scope.go:117] "RemoveContainer" containerID="adc05c7cfe78773415b1f63f63cbc496daf588464d17aa7271edbd1f5222cf42" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.968662 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cc1af3d-7d60-4b25-9b13-d563d3f0e31b" path="/var/lib/kubelet/pods/2cc1af3d-7d60-4b25-9b13-d563d3f0e31b/volumes" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.969313 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77da7acd-19f2-4a61-920d-d341bd5a4598" path="/var/lib/kubelet/pods/77da7acd-19f2-4a61-920d-d341bd5a4598/volumes" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.972530 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab88d000-56c4-4cba-b2e3-efb25f217503" path="/var/lib/kubelet/pods/ab88d000-56c4-4cba-b2e3-efb25f217503/volumes" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.973509 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" path="/var/lib/kubelet/pods/bd54105c-5b1e-4706-9fe8-83cdca837998/volumes" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.975694 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c" path="/var/lib/kubelet/pods/f94fe2a1-eaf1-4f5d-9f7e-4e789f46d81c/volumes" Oct 01 14:02:39 crc kubenswrapper[4605]: I1001 14:02:39.976254 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d6df575b-5ctbf" event={"ID":"80fb1c51-bd86-4896-8dac-59747473f066","Type":"ContainerStarted","Data":"0b145cd8afc897d045e352e5f5f19e9ff8e5923157ba15737c0e22a5ce2b1798"} Oct 01 14:02:40 crc kubenswrapper[4605]: I1001 14:02:40.004854 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bkj69" event={"ID":"f17ed625-db31-40c7-9958-f4b89f66ffa0","Type":"ContainerStarted","Data":"f181348df26d79ee4c6e63fb8ad765de7771b451455895a1ee5602ae1069ed2e"} Oct 01 14:02:40 crc kubenswrapper[4605]: I1001 14:02:40.063992 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-bkj69" podStartSLOduration=33.063975539 podStartE2EDuration="33.063975539s" podCreationTimestamp="2025-10-01 14:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:40.060561412 +0000 UTC m=+1082.804537620" watchObservedRunningTime="2025-10-01 14:02:40.063975539 +0000 UTC m=+1082.807951747" Oct 01 14:02:40 crc kubenswrapper[4605]: I1001 14:02:40.095885 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-749477d64b-5dpnm"] Oct 01 14:02:40 crc kubenswrapper[4605]: W1001 14:02:40.112141 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19af3ac5_8b1a_4301_88a2_96ae085ee9e0.slice/crio-d328797382c8dacd49fad4c3955ddc7b0e591a725e04cf9f988664bc97196fc8 WatchSource:0}: Error finding container d328797382c8dacd49fad4c3955ddc7b0e591a725e04cf9f988664bc97196fc8: Status 404 returned error can't find the container with id d328797382c8dacd49fad4c3955ddc7b0e591a725e04cf9f988664bc97196fc8 Oct 01 
14:02:40 crc kubenswrapper[4605]: I1001 14:02:40.140365 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kdhkw"] Oct 01 14:02:40 crc kubenswrapper[4605]: I1001 14:02:40.611383 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.052308 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"46085e14-7d7e-490a-8078-d2a40a4f3498","Type":"ContainerStarted","Data":"56b63fe7ec1291dd82a36bf5af722665830764d6e1581f43d2928324525ad54c"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.056011 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-px7rk" event={"ID":"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0","Type":"ContainerStarted","Data":"d5c5696a982b9c00fbd496fa6dbc44a3ec17cc7729664ae0dcb2fd3916a2f948"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.061711 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d6df575b-5ctbf" event={"ID":"80fb1c51-bd86-4896-8dac-59747473f066","Type":"ContainerStarted","Data":"10a3b93c9ad184b85f901f6c04f2b3a5dcf062ca68d2ed2050daa2aaf98f6ceb"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.061815 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d6df575b-5ctbf" event={"ID":"80fb1c51-bd86-4896-8dac-59747473f066","Type":"ContainerStarted","Data":"d93481437ed64e0c617d8a3c8b7940558e661e7b1d2543da0c1a1bdfad5ce8da"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.067602 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kdhkw" event={"ID":"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb","Type":"ContainerStarted","Data":"5b4342682e1e4940b2836701ee9c809427752dc8e6f119c53e53b4610e2cf60a"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.067647 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kdhkw" event={"ID":"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb","Type":"ContainerStarted","Data":"ffb4f9ecd159f1c0fa696cfce4f2c82168f923b6fbccd3f1d65fe388b0b3bd71"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.072369 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749477d64b-5dpnm" event={"ID":"19af3ac5-8b1a-4301-88a2-96ae085ee9e0","Type":"ContainerStarted","Data":"423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.072433 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749477d64b-5dpnm" event={"ID":"19af3ac5-8b1a-4301-88a2-96ae085ee9e0","Type":"ContainerStarted","Data":"6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.072448 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749477d64b-5dpnm" event={"ID":"19af3ac5-8b1a-4301-88a2-96ae085ee9e0","Type":"ContainerStarted","Data":"d328797382c8dacd49fad4c3955ddc7b0e591a725e04cf9f988664bc97196fc8"} Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.110668 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-px7rk" podStartSLOduration=3.61806581 podStartE2EDuration="41.110647126s" podCreationTimestamp="2025-10-01 14:02:00 +0000 UTC" firstStartedPulling="2025-10-01 14:02:01.988690728 +0000 UTC m=+1044.732666936" lastFinishedPulling="2025-10-01 14:02:39.481272044 +0000 
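
The pod_startup_latency_tracker lines carry enough data to check how the two reported durations relate: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that minus the image-pull window (zero-value pull timestamps of 0001-01-01, as for neutron-db-sync-bkj69, contribute nothing, so SLO equals E2E there). A quick recomputation for placement-db-sync-px7rk, as a sketch:

// slo_check.go — recompute the durations logged for placement-db-sync-px7rk
// from the timestamps in the entry above (illustrative arithmetic only).
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-01 14:02:00 +0000 UTC")
	firstPull := parse("2025-10-01 14:02:01.988690728 +0000 UTC")
	lastPull := parse("2025-10-01 14:02:39.481272044 +0000 UTC")
	observed := parse("2025-10-01 14:02:41.110647126 +0000 UTC") // watchObservedRunningTime

	e2e := observed.Sub(created)         // end-to-end startup time
	slo := e2e - lastPull.Sub(firstPull) // image-pull window excluded from the SLO
	fmt.Println(e2e, slo)                // 41.110647126s 3.61806581s
}

The output matches the logged podStartE2EDuration="41.110647126s" and podStartSLOduration=3.61806581 exactly.
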
Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.141259    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-64d6df575b-5ctbf" podStartSLOduration=27.509261566 podStartE2EDuration="28.141235642s" podCreationTimestamp="2025-10-01 14:02:13 +0000 UTC" firstStartedPulling="2025-10-01 14:02:39.481367956 +0000 UTC m=+1082.225344164" lastFinishedPulling="2025-10-01 14:02:40.113342032 +0000 UTC m=+1082.857318240" observedRunningTime="2025-10-01 14:02:41.136994215 +0000 UTC m=+1083.880970423" watchObservedRunningTime="2025-10-01 14:02:41.141235642 +0000 UTC m=+1083.885211850"
Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.144791    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kdhkw" podStartSLOduration=19.144774072 podStartE2EDuration="19.144774072s" podCreationTimestamp="2025-10-01 14:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:41.109473056 +0000 UTC m=+1083.853449264" watchObservedRunningTime="2025-10-01 14:02:41.144774072 +0000 UTC m=+1083.888750270"
Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.166471    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-749477d64b-5dpnm" podStartSLOduration=28.166452132 podStartE2EDuration="28.166452132s" podCreationTimestamp="2025-10-01 14:02:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:41.154426927 +0000 UTC m=+1083.898403145" watchObservedRunningTime="2025-10-01 14:02:41.166452132 +0000 UTC m=+1083.910428340"
Oct 01 14:02:41 crc kubenswrapper[4605]: I1001 14:02:41.322568    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:02:42 crc kubenswrapper[4605]: I1001 14:02:42.106524    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"46085e14-7d7e-490a-8078-d2a40a4f3498","Type":"ContainerStarted","Data":"1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a"}
Oct 01 14:02:42 crc kubenswrapper[4605]: I1001 14:02:42.109841    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerStarted","Data":"292676769d30f49539bb03392db2114220120cee6f940246def06af3828c68ac"}
Oct 01 14:02:42 crc kubenswrapper[4605]: I1001 14:02:42.109869    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerStarted","Data":"d7b8722a83fa5afa1facb63a139d20f997ec03365a44b75aab30cfba792ae70e"}
Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.145123    4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-gxlcx" podUID="bd54105c-5b1e-4706-9fe8-83cdca837998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: i/o timeout"
Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.165427    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerStarted","Data":"7635e8638a970607c746051057cbf7b68b7eaa1b3669d8943796c103b67ffd15"}
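
The dnsmasq-dns readiness failure above is a TCP probe: the kubelet dials the pod IP and port and treats a timeout or refusal as failure. Roughly the following, with the one-second timeout being an assumption for illustration:

// tcp_probe.go — what a TCP readiness probe amounts to: dial and close.
// The address mirrors the failing dnsmasq probe above.
package main

import (
	"fmt"
	"net"
	"time"
)

func probeTCP(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err // e.g. "dial tcp 10.217.0.131:5353: i/o timeout"
	}
	return conn.Close() // a successful connect is all the probe needs
}

func main() {
	if err := probeTCP("10.217.0.131:5353", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
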
event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerStarted","Data":"7635e8638a970607c746051057cbf7b68b7eaa1b3669d8943796c103b67ffd15"} Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.177876 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"46085e14-7d7e-490a-8078-d2a40a4f3498","Type":"ContainerStarted","Data":"ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1"} Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.194603 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.194587731 podStartE2EDuration="4.194587731s" podCreationTimestamp="2025-10-01 14:02:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:43.189704897 +0000 UTC m=+1085.933681105" watchObservedRunningTime="2025-10-01 14:02:43.194587731 +0000 UTC m=+1085.938563939" Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.228866 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.22884425 podStartE2EDuration="4.22884425s" podCreationTimestamp="2025-10-01 14:02:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:43.22252664 +0000 UTC m=+1085.966502848" watchObservedRunningTime="2025-10-01 14:02:43.22884425 +0000 UTC m=+1085.972820448" Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.526871 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.526928 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.621741 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:43 crc kubenswrapper[4605]: I1001 14:02:43.621785 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:02:47 crc kubenswrapper[4605]: I1001 14:02:47.213499 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df36773f-c59f-4abb-9adf-20dee81012ae","Type":"ContainerStarted","Data":"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a"} Oct 01 14:02:47 crc kubenswrapper[4605]: I1001 14:02:47.216032 4605 generic.go:334] "Generic (PLEG): container finished" podID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" containerID="d5c5696a982b9c00fbd496fa6dbc44a3ec17cc7729664ae0dcb2fd3916a2f948" exitCode=0 Oct 01 14:02:47 crc kubenswrapper[4605]: I1001 14:02:47.216158 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-px7rk" event={"ID":"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0","Type":"ContainerDied","Data":"d5c5696a982b9c00fbd496fa6dbc44a3ec17cc7729664ae0dcb2fd3916a2f948"} Oct 01 14:02:47 crc kubenswrapper[4605]: I1001 14:02:47.218115 4605 generic.go:334] "Generic (PLEG): container finished" podID="9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" containerID="5b4342682e1e4940b2836701ee9c809427752dc8e6f119c53e53b4610e2cf60a" exitCode=0 Oct 01 14:02:47 crc kubenswrapper[4605]: I1001 14:02:47.218153 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-kdhkw" event={"ID":"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb","Type":"ContainerDied","Data":"5b4342682e1e4940b2836701ee9c809427752dc8e6f119c53e53b4610e2cf60a"} Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.594560 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.598046 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650051 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-logs\") pod \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650115 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5krx\" (UniqueName: \"kubernetes.io/projected/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-kube-api-access-n5krx\") pod \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650151 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-combined-ca-bundle\") pod \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650193 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-scripts\") pod \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650232 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-combined-ca-bundle\") pod \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650267 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-scripts\") pod \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650297 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkrtd\" (UniqueName: \"kubernetes.io/projected/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-kube-api-access-mkrtd\") pod \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650324 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-config-data\") pod \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650449 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-fernet-keys\") pod \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650502 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-config-data\") pod \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\" (UID: \"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650520 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-credential-keys\") pod \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\" (UID: \"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb\") " Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.650507 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-logs" (OuterVolumeSpecName: "logs") pod "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" (UID: "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.657750 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-scripts" (OuterVolumeSpecName: "scripts") pod "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" (UID: "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.658856 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" (UID: "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.661738 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-scripts" (OuterVolumeSpecName: "scripts") pod "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" (UID: "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.662334 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-kube-api-access-mkrtd" (OuterVolumeSpecName: "kube-api-access-mkrtd") pod "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" (UID: "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0"). InnerVolumeSpecName "kube-api-access-mkrtd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.662374 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-kube-api-access-n5krx" (OuterVolumeSpecName: "kube-api-access-n5krx") pod "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" (UID: "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb"). InnerVolumeSpecName "kube-api-access-n5krx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.672888 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" (UID: "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.682817 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-config-data" (OuterVolumeSpecName: "config-data") pod "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" (UID: "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.683248 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" (UID: "a3bd053b-19f8-4908-b8d5-e5c0ae5599c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.685230 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-config-data" (OuterVolumeSpecName: "config-data") pod "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" (UID: "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.700799 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" (UID: "9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753604 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753674 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkrtd\" (UniqueName: \"kubernetes.io/projected/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-kube-api-access-mkrtd\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753686 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753696 4605 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753704 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753712 4605 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753721 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753729 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5krx\" (UniqueName: \"kubernetes.io/projected/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-kube-api-access-n5krx\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753736 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753743 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:48 crc kubenswrapper[4605]: I1001 14:02:48.753751 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.236267 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-px7rk" event={"ID":"a3bd053b-19f8-4908-b8d5-e5c0ae5599c0","Type":"ContainerDied","Data":"c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e"} Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.236334 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3950ca679c87391d6c061a9f85bbb19c59433cde3b17f9f0c20c08921b91c6e" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.236400 4605 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-px7rk" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.239786 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kdhkw" event={"ID":"9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb","Type":"ContainerDied","Data":"ffb4f9ecd159f1c0fa696cfce4f2c82168f923b6fbccd3f1d65fe388b0b3bd71"} Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.239938 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffb4f9ecd159f1c0fa696cfce4f2c82168f923b6fbccd3f1d65fe388b0b3bd71" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.240073 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kdhkw" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.345493 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5cc6b8b7dd-f8khf"] Oct 01 14:02:49 crc kubenswrapper[4605]: E1001 14:02:49.346199 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" containerName="keystone-bootstrap" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.346369 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" containerName="keystone-bootstrap" Oct 01 14:02:49 crc kubenswrapper[4605]: E1001 14:02:49.346485 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" containerName="placement-db-sync" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.346548 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" containerName="placement-db-sync" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.346786 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" containerName="keystone-bootstrap" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.346875 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" containerName="placement-db-sync" Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.348022 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.351671    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-5tvxv"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.351879    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.352007    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.352230    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.352348    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.372015    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5cc6b8b7dd-f8khf"]
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.443521    4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6964b49dc5-fgw45"]
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.444715    4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.449725    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.449901    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.450051    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.450234    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7vm5"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.450774    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.450877    4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.468410    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6964b49dc5-fgw45"]
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.468948    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2524f72b-05b3-4299-90d6-4671b410d59a-logs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.469021    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-public-tls-certs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.469068    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fc9l\" (UniqueName: \"kubernetes.io/projected/2524f72b-05b3-4299-90d6-4671b410d59a-kube-api-access-6fc9l\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.469100    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-internal-tls-certs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.469121    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-combined-ca-bundle\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.469149    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-config-data\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.469202    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-scripts\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.570714    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-scripts\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.570967    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-config-data\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571040    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2524f72b-05b3-4299-90d6-4671b410d59a-logs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571141    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-combined-ca-bundle\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571228    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-credential-keys\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571333    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-public-tls-certs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571405    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-internal-tls-certs\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571494    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nvkb\" (UniqueName: \"kubernetes.io/projected/471f90e3-9942-4516-ad5a-26cddd148bd4-kube-api-access-8nvkb\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571568    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fc9l\" (UniqueName: \"kubernetes.io/projected/2524f72b-05b3-4299-90d6-4671b410d59a-kube-api-access-6fc9l\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571670    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-internal-tls-certs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571759    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-combined-ca-bundle\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571822    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-public-tls-certs\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571912    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-config-data\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.572014    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-fernet-keys\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.571415    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2524f72b-05b3-4299-90d6-4671b410d59a-logs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.572182    4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-scripts\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.576399    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-scripts\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.577711    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-combined-ca-bundle\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.578676    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-config-data\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.581006    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-internal-tls-certs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.588073    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2524f72b-05b3-4299-90d6-4671b410d59a-public-tls-certs\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.594730    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fc9l\" (UniqueName: \"kubernetes.io/projected/2524f72b-05b3-4299-90d6-4671b410d59a-kube-api-access-6fc9l\") pod \"placement-5cc6b8b7dd-f8khf\" (UID: \"2524f72b-05b3-4299-90d6-4671b410d59a\") " pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.654540    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.654583    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.663600    4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.673883    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nvkb\" (UniqueName: \"kubernetes.io/projected/471f90e3-9942-4516-ad5a-26cddd148bd4-kube-api-access-8nvkb\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.673949    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-public-tls-certs\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.674029    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-fernet-keys\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.674053    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-scripts\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.674140    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-config-data\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.674194    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-combined-ca-bundle\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.674219    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-credential-keys\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.674261    4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-internal-tls-certs\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.679680    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-scripts\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.680208    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-combined-ca-bundle\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.680873    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-config-data\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.681751    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-credential-keys\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.681918    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-fernet-keys\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.684584    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-public-tls-certs\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.684688    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/471f90e3-9942-4516-ad5a-26cddd148bd4-internal-tls-certs\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.707154    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.721250    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.721914    4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nvkb\" (UniqueName: \"kubernetes.io/projected/471f90e3-9942-4516-ad5a-26cddd148bd4-kube-api-access-8nvkb\") pod \"keystone-6964b49dc5-fgw45\" (UID: \"471f90e3-9942-4516-ad5a-26cddd148bd4\") " pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.761671    4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.834440    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.834761    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.900825    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:49 crc kubenswrapper[4605]: I1001 14:02:49.917341    4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.161592    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5cc6b8b7dd-f8khf"]
Oct 01 14:02:50 crc kubenswrapper[4605]: W1001 14:02:50.170822    4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2524f72b_05b3_4299_90d6_4671b410d59a.slice/crio-b4b45d6d1d78fd5f00bc99a5a86b81c2c5e3fee3ae39fa391e4a6ce11a2d140f WatchSource:0}: Error finding container b4b45d6d1d78fd5f00bc99a5a86b81c2c5e3fee3ae39fa391e4a6ce11a2d140f: Status 404 returned error can't find the container with id b4b45d6d1d78fd5f00bc99a5a86b81c2c5e3fee3ae39fa391e4a6ce11a2d140f
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.248577    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cc6b8b7dd-f8khf" event={"ID":"2524f72b-05b3-4299-90d6-4671b410d59a","Type":"ContainerStarted","Data":"b4b45d6d1d78fd5f00bc99a5a86b81c2c5e3fee3ae39fa391e4a6ce11a2d140f"}
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.249530    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.249742    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.249763    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.249774    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:50 crc kubenswrapper[4605]: I1001 14:02:50.340839    4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6964b49dc5-fgw45"]
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.258981    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cc6b8b7dd-f8khf" event={"ID":"2524f72b-05b3-4299-90d6-4671b410d59a","Type":"ContainerStarted","Data":"f5c4be6c6f6b9d6f6a9c25bd505f78cb7d2a0e66226c2fea0876482250c91490"}
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.259353    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cc6b8b7dd-f8khf" event={"ID":"2524f72b-05b3-4299-90d6-4671b410d59a","Type":"ContainerStarted","Data":"693eac6974457e42882d1aa890be3182c47bd1d0e1139121c7a1c92ab8ac91f8"}
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.259473    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.259493    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5cc6b8b7dd-f8khf"
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.261370    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6964b49dc5-fgw45" event={"ID":"471f90e3-9942-4516-ad5a-26cddd148bd4","Type":"ContainerStarted","Data":"43619babfe3cf4f91d3ac0df9fd7d121405ecebef5340016077afae121d99b9a"}
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.261472    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6964b49dc5-fgw45" event={"ID":"471f90e3-9942-4516-ad5a-26cddd148bd4","Type":"ContainerStarted","Data":"df2e939463e9428efc6ea2d7a4d3a8b64d2d1a3f28c6589ea873f58c023466ec"}
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.327105    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5cc6b8b7dd-f8khf" podStartSLOduration=2.327078533 podStartE2EDuration="2.327078533s" podCreationTimestamp="2025-10-01 14:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:51.309761224 +0000 UTC m=+1094.053737432" watchObservedRunningTime="2025-10-01 14:02:51.327078533 +0000 UTC m=+1094.071054741"
Oct 01 14:02:51 crc kubenswrapper[4605]: I1001 14:02:51.327986    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6964b49dc5-fgw45" podStartSLOduration=2.327981196 podStartE2EDuration="2.327981196s" podCreationTimestamp="2025-10-01 14:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:02:51.32538822 +0000 UTC m=+1094.069364428" watchObservedRunningTime="2025-10-01 14:02:51.327981196 +0000 UTC m=+1094.071957404"
Oct 01 14:02:52 crc kubenswrapper[4605]: I1001 14:02:52.268748    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:02:53 crc kubenswrapper[4605]: I1001 14:02:53.529802    4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 01 14:02:53 crc kubenswrapper[4605]: I1001 14:02:53.623495    4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-64d6df575b-5ctbf" podUID="80fb1c51-bd86-4896-8dac-59747473f066" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused"
Oct 01 14:02:54 crc kubenswrapper[4605]: I1001 14:02:54.756766    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 01 14:02:54 crc kubenswrapper[4605]: I1001 14:02:54.757165    4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 01 14:02:54 crc kubenswrapper[4605]: I1001 14:02:54.760644    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 01 14:02:54 crc kubenswrapper[4605]: I1001 14:02:54.760718    4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 01 14:02:54 crc kubenswrapper[4605]: I1001 14:02:54.764235    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
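
The horizon startup probes above are HTTPS GETs against the dashboard login URL; "connection refused" means nothing is listening on 8443 yet, so the container stays in its startup phase and the kubelet keeps retrying. Conceptually the probe is close to the following sketch; the skip-verify transport and the 2xx–3xx success window are assumptions for illustration, not verified kubelet internals:

// http_probe.go — an HTTPS startup-style probe against the horizon URL above.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func probeHTTPS(url string) error {
	client := &http.Client{
		Timeout:   time.Second,
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 10.217.0.150:8443: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	url := "https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/"
	if err := probeHTTPS(url); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
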
Oct 01 14:02:54 crc kubenswrapper[4605]: I1001 14:02:54.794500    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 01 14:03:03 crc kubenswrapper[4605]: I1001 14:03:03.527671    4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 01 14:03:03 crc kubenswrapper[4605]: I1001 14:03:03.621162    4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-64d6df575b-5ctbf" podUID="80fb1c51-bd86-4896-8dac-59747473f066" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused"
Oct 01 14:03:03 crc kubenswrapper[4605]: E1001 14:03:03.883595    4605 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"ceilometer-notification-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="df36773f-c59f-4abb-9adf-20dee81012ae"
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.375159    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df36773f-c59f-4abb-9adf-20dee81012ae","Type":"ContainerStarted","Data":"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3"}
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.375267    4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="sg-core" containerID="cri-o://1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a" gracePeriod=30
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.375347    4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.375377    4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="proxy-httpd" containerID="cri-o://0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3" gracePeriod=30
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.398777    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xj5tw" event={"ID":"e8f994e1-7878-41ab-b619-4946d957e710","Type":"ContainerStarted","Data":"36c926e892b2583480abb025793be6e8aee7874e4e154d9acd73ef5f94841bf7"}
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.422684    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-978c2" event={"ID":"287fa988-b116-4b4d-a02c-990e801124d0","Type":"ContainerStarted","Data":"5dd8a3eef8be60b024489f72e74f8f402a7c7268e1358dd126a7e73114f67f4a"}
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.498324    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-xj5tw" podStartSLOduration=3.507590828 podStartE2EDuration="58.498303279s" podCreationTimestamp="2025-10-01 14:02:06 +0000 UTC" firstStartedPulling="2025-10-01 14:02:08.550163191 +0000 UTC m=+1051.294139399" lastFinishedPulling="2025-10-01 14:03:03.540875642 +0000 UTC m=+1106.284851850" observedRunningTime="2025-10-01 14:03:04.447395721 +0000 UTC m=+1107.191371929" watchObservedRunningTime="2025-10-01 14:03:04.498303279 +0000 UTC m=+1107.242279487"
Oct 01 14:03:04 crc kubenswrapper[4605]: I1001 14:03:04.511564    4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-978c2" podStartSLOduration=2.739280639 podStartE2EDuration="1m2.511542064s" podCreationTimestamp="2025-10-01 14:02:02 +0000 UTC" firstStartedPulling="2025-10-01 14:02:03.868257939 +0000 UTC m=+1046.612234147" lastFinishedPulling="2025-10-01 14:03:03.640519364 +0000 UTC m=+1106.384495572" observedRunningTime="2025-10-01 14:03:04.494800551 +0000 UTC m=+1107.238776759" watchObservedRunningTime="2025-10-01 14:03:04.511542064 +0000 UTC m=+1107.255518272"
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.327067    4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432180    4605 generic.go:334] "Generic (PLEG): container finished" podID="df36773f-c59f-4abb-9adf-20dee81012ae" containerID="0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3" exitCode=0
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432222    4605 generic.go:334] "Generic (PLEG): container finished" podID="df36773f-c59f-4abb-9adf-20dee81012ae" containerID="1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a" exitCode=2
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432245    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df36773f-c59f-4abb-9adf-20dee81012ae","Type":"ContainerDied","Data":"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3"}
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432279    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df36773f-c59f-4abb-9adf-20dee81012ae","Type":"ContainerDied","Data":"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a"}
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432294    4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df36773f-c59f-4abb-9adf-20dee81012ae","Type":"ContainerDied","Data":"07881f407a2c0424f0d04440674ac8125faf7ce3a119954e51de155b19acccaa"}
Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432277    4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.432394 4605 scope.go:117] "RemoveContainer" containerID="0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.454477 4605 scope.go:117] "RemoveContainer" containerID="1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457376 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-run-httpd\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457438 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-combined-ca-bundle\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457514 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-sg-core-conf-yaml\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457594 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-log-httpd\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457622 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-config-data\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457682 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-scripts\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457709 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzsqw\" (UniqueName: \"kubernetes.io/projected/df36773f-c59f-4abb-9adf-20dee81012ae-kube-api-access-fzsqw\") pod \"df36773f-c59f-4abb-9adf-20dee81012ae\" (UID: \"df36773f-c59f-4abb-9adf-20dee81012ae\") " Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.457784 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.458027 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.459492 4605 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.459621 4605 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df36773f-c59f-4abb-9adf-20dee81012ae-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.464228 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-scripts" (OuterVolumeSpecName: "scripts") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.470699 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df36773f-c59f-4abb-9adf-20dee81012ae-kube-api-access-fzsqw" (OuterVolumeSpecName: "kube-api-access-fzsqw") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "kube-api-access-fzsqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.499603 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.507697 4605 scope.go:117] "RemoveContainer" containerID="0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3" Oct 01 14:03:05 crc kubenswrapper[4605]: E1001 14:03:05.508839 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3\": container with ID starting with 0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3 not found: ID does not exist" containerID="0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.508873 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3"} err="failed to get container status \"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3\": rpc error: code = NotFound desc = could not find container \"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3\": container with ID starting with 0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3 not found: ID does not exist" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.508894 4605 scope.go:117] "RemoveContainer" containerID="1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a" Oct 01 14:03:05 crc kubenswrapper[4605]: E1001 14:03:05.513185 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a\": container with ID starting with 1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a not found: ID does not exist" containerID="1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.513215 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a"} err="failed to get container status \"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a\": rpc error: code = NotFound desc = could not find container \"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a\": container with ID starting with 1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a not found: ID does not exist" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.513233 4605 scope.go:117] "RemoveContainer" containerID="0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.516582 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3"} err="failed to get container status \"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3\": rpc error: code = NotFound desc = could not find container \"0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3\": container with ID starting with 0fc0377b2029f37604c0c4bda103a216896c7fc8074deb6e8a7f544713644ef3 not found: ID does not exist" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.516607 4605 scope.go:117] "RemoveContainer" containerID="1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.516862 4605 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a"} err="failed to get container status \"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a\": rpc error: code = NotFound desc = could not find container \"1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a\": container with ID starting with 1d26b7723ab97797c3971a3b49b924379e5550bb8c3fce715dd2885b4d6bd82a not found: ID does not exist" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.528054 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-config-data" (OuterVolumeSpecName: "config-data") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.536038 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "df36773f-c59f-4abb-9adf-20dee81012ae" (UID: "df36773f-c59f-4abb-9adf-20dee81012ae"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.561118 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.561150 4605 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.561160 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.561170 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df36773f-c59f-4abb-9adf-20dee81012ae-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.561206 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzsqw\" (UniqueName: \"kubernetes.io/projected/df36773f-c59f-4abb-9adf-20dee81012ae-kube-api-access-fzsqw\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.800582 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.815353 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.838124 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:05 crc kubenswrapper[4605]: E1001 14:03:05.838543 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="proxy-httpd" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.838561 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" 
containerName="proxy-httpd" Oct 01 14:03:05 crc kubenswrapper[4605]: E1001 14:03:05.838598 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="sg-core" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.838605 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="sg-core" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.838767 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="proxy-httpd" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.838785 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" containerName="sg-core" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.841352 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.844365 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.844568 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.872737 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.935004 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df36773f-c59f-4abb-9adf-20dee81012ae" path="/var/lib/kubelet/pods/df36773f-c59f-4abb-9adf-20dee81012ae/volumes" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968296 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-log-httpd\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968356 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bd76\" (UniqueName: \"kubernetes.io/projected/45659557-27c7-4a59-afbf-27c09718d6f7-kube-api-access-5bd76\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968449 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968508 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-scripts\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968603 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " 
pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968629 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-config-data\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:05 crc kubenswrapper[4605]: I1001 14:03:05.968660 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-run-httpd\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070308 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070377 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-config-data\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070440 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-run-httpd\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070524 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-log-httpd\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070570 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bd76\" (UniqueName: \"kubernetes.io/projected/45659557-27c7-4a59-afbf-27c09718d6f7-kube-api-access-5bd76\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070639 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.070704 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-scripts\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.072313 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-log-httpd\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " 
pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.072402 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-run-httpd\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.090043 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.090329 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-config-data\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.090591 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-scripts\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.094993 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.101414 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bd76\" (UniqueName: \"kubernetes.io/projected/45659557-27c7-4a59-afbf-27c09718d6f7-kube-api-access-5bd76\") pod \"ceilometer-0\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.169287 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:06 crc kubenswrapper[4605]: I1001 14:03:06.622032 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:06 crc kubenswrapper[4605]: W1001 14:03:06.634523 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45659557_27c7_4a59_afbf_27c09718d6f7.slice/crio-6787e3caa32fcfa70524cd6baae05e92da1a75862c82dc3bebb87e5d2ff70e52 WatchSource:0}: Error finding container 6787e3caa32fcfa70524cd6baae05e92da1a75862c82dc3bebb87e5d2ff70e52: Status 404 returned error can't find the container with id 6787e3caa32fcfa70524cd6baae05e92da1a75862c82dc3bebb87e5d2ff70e52 Oct 01 14:03:07 crc kubenswrapper[4605]: I1001 14:03:07.460450 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerStarted","Data":"ec311de54a1fe907b20987b92ee0d73cf7dd1c8ba41b14d2e6aa6f047f0871a6"} Oct 01 14:03:07 crc kubenswrapper[4605]: I1001 14:03:07.460804 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerStarted","Data":"6787e3caa32fcfa70524cd6baae05e92da1a75862c82dc3bebb87e5d2ff70e52"} Oct 01 14:03:08 crc kubenswrapper[4605]: I1001 14:03:08.474712 4605 generic.go:334] "Generic (PLEG): container finished" podID="e8f994e1-7878-41ab-b619-4946d957e710" containerID="36c926e892b2583480abb025793be6e8aee7874e4e154d9acd73ef5f94841bf7" exitCode=0 Oct 01 14:03:08 crc kubenswrapper[4605]: I1001 14:03:08.474802 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xj5tw" event={"ID":"e8f994e1-7878-41ab-b619-4946d957e710","Type":"ContainerDied","Data":"36c926e892b2583480abb025793be6e8aee7874e4e154d9acd73ef5f94841bf7"} Oct 01 14:03:08 crc kubenswrapper[4605]: I1001 14:03:08.476578 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerStarted","Data":"d764c8bddc16be2af1844bea8e0e18cf2a336f52a5afde15acccbe2700912180"} Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.489181 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerStarted","Data":"9071895aa65ccbe403e7e2bf405c882b4c217f9992dc5831303042425173982a"} Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.853627 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.946027 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrznj\" (UniqueName: \"kubernetes.io/projected/e8f994e1-7878-41ab-b619-4946d957e710-kube-api-access-mrznj\") pod \"e8f994e1-7878-41ab-b619-4946d957e710\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.946115 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-db-sync-config-data\") pod \"e8f994e1-7878-41ab-b619-4946d957e710\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.946174 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-combined-ca-bundle\") pod \"e8f994e1-7878-41ab-b619-4946d957e710\" (UID: \"e8f994e1-7878-41ab-b619-4946d957e710\") " Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.951405 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8f994e1-7878-41ab-b619-4946d957e710-kube-api-access-mrznj" (OuterVolumeSpecName: "kube-api-access-mrznj") pod "e8f994e1-7878-41ab-b619-4946d957e710" (UID: "e8f994e1-7878-41ab-b619-4946d957e710"). InnerVolumeSpecName "kube-api-access-mrznj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.955354 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e8f994e1-7878-41ab-b619-4946d957e710" (UID: "e8f994e1-7878-41ab-b619-4946d957e710"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:09 crc kubenswrapper[4605]: I1001 14:03:09.970977 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8f994e1-7878-41ab-b619-4946d957e710" (UID: "e8f994e1-7878-41ab-b619-4946d957e710"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.048047 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrznj\" (UniqueName: \"kubernetes.io/projected/e8f994e1-7878-41ab-b619-4946d957e710-kube-api-access-mrznj\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.048076 4605 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.048084 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8f994e1-7878-41ab-b619-4946d957e710-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.499281 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xj5tw" event={"ID":"e8f994e1-7878-41ab-b619-4946d957e710","Type":"ContainerDied","Data":"ea75477a7a647525ad692b9bb62d5eb9017e34db832e284c2dd8c8cd8f532ccc"} Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.499539 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea75477a7a647525ad692b9bb62d5eb9017e34db832e284c2dd8c8cd8f532ccc" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.499605 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xj5tw" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.749949 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-66797cbc6-zrzb7"] Oct 01 14:03:10 crc kubenswrapper[4605]: E1001 14:03:10.750291 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8f994e1-7878-41ab-b619-4946d957e710" containerName="barbican-db-sync" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.750302 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8f994e1-7878-41ab-b619-4946d957e710" containerName="barbican-db-sync" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.750462 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8f994e1-7878-41ab-b619-4946d957e710" containerName="barbican-db-sync" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.751348 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.759268 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mxktz" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.759277 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.759457 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.834273 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-798f8d7567-pfjfn"] Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.835568 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.843728 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859157 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-config-data\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859194 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-config-data-custom\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859211 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8f7068-62be-4ee7-9f6a-63812a2f5413-logs\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859254 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-combined-ca-bundle\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859274 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-combined-ca-bundle\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859291 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87wq6\" (UniqueName: \"kubernetes.io/projected/6f8f7068-62be-4ee7-9f6a-63812a2f5413-kube-api-access-87wq6\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859331 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-config-data-custom\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859370 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f1482-e0da-43ae-89c5-bb9a5beaee2f-logs\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: 
\"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859393 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdsmb\" (UniqueName: \"kubernetes.io/projected/991f1482-e0da-43ae-89c5-bb9a5beaee2f-kube-api-access-fdsmb\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.859444 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-config-data\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.860063 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-66797cbc6-zrzb7"] Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.878628 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-798f8d7567-pfjfn"] Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.949817 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-hvfr5"] Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.951245 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961026 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-config-data-custom\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961183 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f1482-e0da-43ae-89c5-bb9a5beaee2f-logs\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961223 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdsmb\" (UniqueName: \"kubernetes.io/projected/991f1482-e0da-43ae-89c5-bb9a5beaee2f-kube-api-access-fdsmb\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961289 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-config-data\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961318 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-config-data\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961336 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-config-data-custom\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961351 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8f7068-62be-4ee7-9f6a-63812a2f5413-logs\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961414 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-combined-ca-bundle\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961432 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-combined-ca-bundle\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.961449 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87wq6\" (UniqueName: \"kubernetes.io/projected/6f8f7068-62be-4ee7-9f6a-63812a2f5413-kube-api-access-87wq6\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.964891 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-hvfr5"] Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.974672 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/991f1482-e0da-43ae-89c5-bb9a5beaee2f-logs\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.977447 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-config-data-custom\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.977924 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-config-data\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " 
pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.979813 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f8f7068-62be-4ee7-9f6a-63812a2f5413-logs\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.982710 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-config-data\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:10 crc kubenswrapper[4605]: I1001 14:03:10.997240 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/991f1482-e0da-43ae-89c5-bb9a5beaee2f-combined-ca-bundle\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.018717 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-config-data-custom\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.041472 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87wq6\" (UniqueName: \"kubernetes.io/projected/6f8f7068-62be-4ee7-9f6a-63812a2f5413-kube-api-access-87wq6\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.043024 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f8f7068-62be-4ee7-9f6a-63812a2f5413-combined-ca-bundle\") pod \"barbican-worker-798f8d7567-pfjfn\" (UID: \"6f8f7068-62be-4ee7-9f6a-63812a2f5413\") " pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.070226 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdsmb\" (UniqueName: \"kubernetes.io/projected/991f1482-e0da-43ae-89c5-bb9a5beaee2f-kube-api-access-fdsmb\") pod \"barbican-keystone-listener-66797cbc6-zrzb7\" (UID: \"991f1482-e0da-43ae-89c5-bb9a5beaee2f\") " pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.071525 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.071585 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: 
\"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.071621 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f992w\" (UniqueName: \"kubernetes.io/projected/f759e02e-90e1-4fa7-b0e5-d79e726d077c-kube-api-access-f992w\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.071691 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.071727 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-config\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.071756 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.173479 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.173542 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f992w\" (UniqueName: \"kubernetes.io/projected/f759e02e-90e1-4fa7-b0e5-d79e726d077c-kube-api-access-f992w\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.180867 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-798f8d7567-pfjfn" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.183063 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.184961 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.185033 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-config\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.185070 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.185208 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.186076 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.186731 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.186860 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-config\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.187741 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: 
I1001 14:03:11.207309 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-558ff4bf9d-h49gp"] Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.208991 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.215903 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-558ff4bf9d-h49gp"] Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.217925 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.231822 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f992w\" (UniqueName: \"kubernetes.io/projected/f759e02e-90e1-4fa7-b0e5-d79e726d077c-kube-api-access-f992w\") pod \"dnsmasq-dns-59d5ff467f-hvfr5\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.290198 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.290279 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-logs\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.290342 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data-custom\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.290363 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cttzr\" (UniqueName: \"kubernetes.io/projected/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-kube-api-access-cttzr\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.290393 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-combined-ca-bundle\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.367998 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.395133 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.395465 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-logs\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.395537 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data-custom\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.395556 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cttzr\" (UniqueName: \"kubernetes.io/projected/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-kube-api-access-cttzr\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.395586 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-combined-ca-bundle\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.398060 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-logs\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.404191 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.404746 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-combined-ca-bundle\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.408375 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data-custom\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 
14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.430736 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cttzr\" (UniqueName: \"kubernetes.io/projected/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-kube-api-access-cttzr\") pod \"barbican-api-558ff4bf9d-h49gp\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") " pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.437738 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:11 crc kubenswrapper[4605]: I1001 14:03:11.625597 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.057512 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-798f8d7567-pfjfn"] Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.098018 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-66797cbc6-zrzb7"] Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.253467 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-hvfr5"] Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.411157 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-558ff4bf9d-h49gp"] Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.552718 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerStarted","Data":"a7165cd54ef4832d478f584e30401a0459bd23248c5f67491ba5551bbb78b95d"} Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.552896 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.559506 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" event={"ID":"f759e02e-90e1-4fa7-b0e5-d79e726d077c","Type":"ContainerStarted","Data":"cb598ef07277a78d3ab4bfaa10cae42ffce108bb35298e5eeb931894adf0fbb1"} Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.563856 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" event={"ID":"991f1482-e0da-43ae-89c5-bb9a5beaee2f","Type":"ContainerStarted","Data":"fd8352d43a729639bcdfd7938b75f80eaedadc7f1a4b9da2f7167bc61ee1608d"} Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.568233 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-558ff4bf9d-h49gp" event={"ID":"1c70fd20-00d7-4bbd-9f5d-67b674a92e84","Type":"ContainerStarted","Data":"1f32c21d82de8cbd74e4a04032fbfbfc1bcbbbe642a994499875cf7213a578ee"} Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.570246 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-798f8d7567-pfjfn" event={"ID":"6f8f7068-62be-4ee7-9f6a-63812a2f5413","Type":"ContainerStarted","Data":"b118482df6cd8144b99b86711cf31c17db6b03df8f21c148f2ecf2a0e9795ac1"} Oct 01 14:03:12 crc kubenswrapper[4605]: I1001 14:03:12.574728 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.421348314 podStartE2EDuration="7.574710455s" podCreationTimestamp="2025-10-01 14:03:05 +0000 UTC" firstStartedPulling="2025-10-01 14:03:06.636902394 +0000 UTC m=+1109.380878612" 
lastFinishedPulling="2025-10-01 14:03:11.790264555 +0000 UTC m=+1114.534240753" observedRunningTime="2025-10-01 14:03:12.568825246 +0000 UTC m=+1115.312801454" watchObservedRunningTime="2025-10-01 14:03:12.574710455 +0000 UTC m=+1115.318686653" Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.592261 4605 generic.go:334] "Generic (PLEG): container finished" podID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerID="3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9" exitCode=0 Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.592852 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" event={"ID":"f759e02e-90e1-4fa7-b0e5-d79e726d077c","Type":"ContainerDied","Data":"3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9"} Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.602225 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-558ff4bf9d-h49gp" event={"ID":"1c70fd20-00d7-4bbd-9f5d-67b674a92e84","Type":"ContainerStarted","Data":"55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95"} Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.602270 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-558ff4bf9d-h49gp" event={"ID":"1c70fd20-00d7-4bbd-9f5d-67b674a92e84","Type":"ContainerStarted","Data":"61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021"} Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.602289 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.602302 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:13 crc kubenswrapper[4605]: I1001 14:03:13.672340 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-558ff4bf9d-h49gp" podStartSLOduration=2.672320578 podStartE2EDuration="2.672320578s" podCreationTimestamp="2025-10-01 14:03:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:13.634406838 +0000 UTC m=+1116.378383046" watchObservedRunningTime="2025-10-01 14:03:13.672320578 +0000 UTC m=+1116.416296786" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.271595 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-59b677b5cd-92trn"] Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.282138 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.285396 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.296617 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-59b677b5cd-92trn"] Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.298117 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.367810 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-public-tls-certs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.367914 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-combined-ca-bundle\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.367961 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-config-data-custom\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.367980 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-internal-tls-certs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.368007 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-config-data\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.368029 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tbtn\" (UniqueName: \"kubernetes.io/projected/b92ac133-ded4-4276-a43a-7d9414d051ab-kube-api-access-4tbtn\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.368076 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b92ac133-ded4-4276-a43a-7d9414d051ab-logs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.469640 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-combined-ca-bundle\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.469757 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-config-data-custom\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.469783 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-internal-tls-certs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.469841 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-config-data\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.469872 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tbtn\" (UniqueName: \"kubernetes.io/projected/b92ac133-ded4-4276-a43a-7d9414d051ab-kube-api-access-4tbtn\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.469982 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b92ac133-ded4-4276-a43a-7d9414d051ab-logs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.470023 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-public-tls-certs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.470863 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b92ac133-ded4-4276-a43a-7d9414d051ab-logs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.479195 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-config-data-custom\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.479385 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-internal-tls-certs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.479477 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-public-tls-certs\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.487978 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-combined-ca-bundle\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.490402 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92ac133-ded4-4276-a43a-7d9414d051ab-config-data\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.496137 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tbtn\" (UniqueName: \"kubernetes.io/projected/b92ac133-ded4-4276-a43a-7d9414d051ab-kube-api-access-4tbtn\") pod \"barbican-api-59b677b5cd-92trn\" (UID: \"b92ac133-ded4-4276-a43a-7d9414d051ab\") " pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:14 crc kubenswrapper[4605]: I1001 14:03:14.617581 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.673212 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-59b677b5cd-92trn"] Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.701827 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" event={"ID":"f759e02e-90e1-4fa7-b0e5-d79e726d077c","Type":"ContainerStarted","Data":"f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883"} Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.702974 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.723320 4605 generic.go:334] "Generic (PLEG): container finished" podID="287fa988-b116-4b4d-a02c-990e801124d0" containerID="5dd8a3eef8be60b024489f72e74f8f402a7c7268e1358dd126a7e73114f67f4a" exitCode=0 Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.723425 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-978c2" event={"ID":"287fa988-b116-4b4d-a02c-990e801124d0","Type":"ContainerDied","Data":"5dd8a3eef8be60b024489f72e74f8f402a7c7268e1358dd126a7e73114f67f4a"} Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.757867 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-798f8d7567-pfjfn" event={"ID":"6f8f7068-62be-4ee7-9f6a-63812a2f5413","Type":"ContainerStarted","Data":"d921752d525639c979e1ebd4951dc51def01d29ba7b760e34d2fad1af0171828"} Oct 01 14:03:15 crc kubenswrapper[4605]: I1001 14:03:15.770912 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" podStartSLOduration=5.770895241 podStartE2EDuration="5.770895241s" podCreationTimestamp="2025-10-01 14:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:15.758311742 +0000 UTC m=+1118.502287950" watchObservedRunningTime="2025-10-01 14:03:15.770895241 +0000 UTC m=+1118.514871449" Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.770764 4605 generic.go:334] "Generic (PLEG): container finished" podID="f17ed625-db31-40c7-9958-f4b89f66ffa0" containerID="f181348df26d79ee4c6e63fb8ad765de7771b451455895a1ee5602ae1069ed2e" exitCode=0 Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.770865 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bkj69" event={"ID":"f17ed625-db31-40c7-9958-f4b89f66ffa0","Type":"ContainerDied","Data":"f181348df26d79ee4c6e63fb8ad765de7771b451455895a1ee5602ae1069ed2e"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.772981 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59b677b5cd-92trn" event={"ID":"b92ac133-ded4-4276-a43a-7d9414d051ab","Type":"ContainerStarted","Data":"72b71b6b89b1241165729088781b24adb9476b6e076e4fc368360c5cd69741a6"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.773017 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59b677b5cd-92trn" event={"ID":"b92ac133-ded4-4276-a43a-7d9414d051ab","Type":"ContainerStarted","Data":"6d0523e19d0a631eb46d128df3450f3e6152f651018c45cdebc061a67c121ddc"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.773029 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59b677b5cd-92trn" 
event={"ID":"b92ac133-ded4-4276-a43a-7d9414d051ab","Type":"ContainerStarted","Data":"6188217410fe2f1a15ac599b40e925cb61e74ff70aeea20f26af207f64ecfeb1"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.773429 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.778636 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-798f8d7567-pfjfn" event={"ID":"6f8f7068-62be-4ee7-9f6a-63812a2f5413","Type":"ContainerStarted","Data":"73232f242ddac9c4eee523f66675c01a0bc940f9daf4fcbebfa1234d14e928ff"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.781260 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" event={"ID":"991f1482-e0da-43ae-89c5-bb9a5beaee2f","Type":"ContainerStarted","Data":"ffcee74a21a905d3bc642384ecd4ce6dc01e90cab5ec0c6b3579e105afe16d84"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.781296 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" event={"ID":"991f1482-e0da-43ae-89c5-bb9a5beaee2f","Type":"ContainerStarted","Data":"780cc797aa3d127b9b5151796126410e49fa5b18f502d4c9d3d3518a264bfd7a"} Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.819863 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-66797cbc6-zrzb7" podStartSLOduration=3.66251448 podStartE2EDuration="6.819840683s" podCreationTimestamp="2025-10-01 14:03:10 +0000 UTC" firstStartedPulling="2025-10-01 14:03:12.078285183 +0000 UTC m=+1114.822261391" lastFinishedPulling="2025-10-01 14:03:15.235611386 +0000 UTC m=+1117.979587594" observedRunningTime="2025-10-01 14:03:16.815891273 +0000 UTC m=+1119.559867481" watchObservedRunningTime="2025-10-01 14:03:16.819840683 +0000 UTC m=+1119.563816891" Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.839001 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-59b677b5cd-92trn" podStartSLOduration=2.838984858 podStartE2EDuration="2.838984858s" podCreationTimestamp="2025-10-01 14:03:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:16.838429754 +0000 UTC m=+1119.582405962" watchObservedRunningTime="2025-10-01 14:03:16.838984858 +0000 UTC m=+1119.582961066" Oct 01 14:03:16 crc kubenswrapper[4605]: I1001 14:03:16.883313 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-798f8d7567-pfjfn" podStartSLOduration=3.748687862 podStartE2EDuration="6.883283969s" podCreationTimestamp="2025-10-01 14:03:10 +0000 UTC" firstStartedPulling="2025-10-01 14:03:12.101314216 +0000 UTC m=+1114.845290424" lastFinishedPulling="2025-10-01 14:03:15.235910323 +0000 UTC m=+1117.979886531" observedRunningTime="2025-10-01 14:03:16.872127726 +0000 UTC m=+1119.616103944" watchObservedRunningTime="2025-10-01 14:03:16.883283969 +0000 UTC m=+1119.627260177" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.277033 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-978c2" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359363 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/287fa988-b116-4b4d-a02c-990e801124d0-etc-machine-id\") pod \"287fa988-b116-4b4d-a02c-990e801124d0\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359444 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-db-sync-config-data\") pod \"287fa988-b116-4b4d-a02c-990e801124d0\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359470 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/287fa988-b116-4b4d-a02c-990e801124d0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "287fa988-b116-4b4d-a02c-990e801124d0" (UID: "287fa988-b116-4b4d-a02c-990e801124d0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359492 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-combined-ca-bundle\") pod \"287fa988-b116-4b4d-a02c-990e801124d0\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359573 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-scripts\") pod \"287fa988-b116-4b4d-a02c-990e801124d0\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359652 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-config-data\") pod \"287fa988-b116-4b4d-a02c-990e801124d0\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.359709 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvfhk\" (UniqueName: \"kubernetes.io/projected/287fa988-b116-4b4d-a02c-990e801124d0-kube-api-access-bvfhk\") pod \"287fa988-b116-4b4d-a02c-990e801124d0\" (UID: \"287fa988-b116-4b4d-a02c-990e801124d0\") " Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.360184 4605 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/287fa988-b116-4b4d-a02c-990e801124d0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.373222 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "287fa988-b116-4b4d-a02c-990e801124d0" (UID: "287fa988-b116-4b4d-a02c-990e801124d0"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.378143 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/287fa988-b116-4b4d-a02c-990e801124d0-kube-api-access-bvfhk" (OuterVolumeSpecName: "kube-api-access-bvfhk") pod "287fa988-b116-4b4d-a02c-990e801124d0" (UID: "287fa988-b116-4b4d-a02c-990e801124d0"). InnerVolumeSpecName "kube-api-access-bvfhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.383984 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-scripts" (OuterVolumeSpecName: "scripts") pod "287fa988-b116-4b4d-a02c-990e801124d0" (UID: "287fa988-b116-4b4d-a02c-990e801124d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.423311 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "287fa988-b116-4b4d-a02c-990e801124d0" (UID: "287fa988-b116-4b4d-a02c-990e801124d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.434375 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-config-data" (OuterVolumeSpecName: "config-data") pod "287fa988-b116-4b4d-a02c-990e801124d0" (UID: "287fa988-b116-4b4d-a02c-990e801124d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.461822 4605 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.461861 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.461875 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.461892 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/287fa988-b116-4b4d-a02c-990e801124d0-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.461906 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvfhk\" (UniqueName: \"kubernetes.io/projected/287fa988-b116-4b4d-a02c-990e801124d0-kube-api-access-bvfhk\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.649532 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.665692 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 
14:03:17.795253 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-978c2" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.795357 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-978c2" event={"ID":"287fa988-b116-4b4d-a02c-990e801124d0","Type":"ContainerDied","Data":"53b88e1cb5dc2a7716904bc2dcf513a17220f39d8228e44d8172bd2e27ac0026"} Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.795875 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53b88e1cb5dc2a7716904bc2dcf513a17220f39d8228e44d8172bd2e27ac0026" Oct 01 14:03:17 crc kubenswrapper[4605]: I1001 14:03:17.796247 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.120698 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:18 crc kubenswrapper[4605]: E1001 14:03:18.121122 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="287fa988-b116-4b4d-a02c-990e801124d0" containerName="cinder-db-sync" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.121142 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="287fa988-b116-4b4d-a02c-990e801124d0" containerName="cinder-db-sync" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.121331 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="287fa988-b116-4b4d-a02c-990e801124d0" containerName="cinder-db-sync" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.122279 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.128649 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.128999 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.129131 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-7fqq4" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.129727 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.142588 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.178805 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23411507-ae6e-438e-84e6-3505947e632b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.178877 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.178913 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.178978 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kn8d\" (UniqueName: \"kubernetes.io/projected/23411507-ae6e-438e-84e6-3505947e632b-kube-api-access-7kn8d\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.179016 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.179048 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-scripts\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.221070 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-hvfr5"] Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.256996 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-h476p"] Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.258873 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.283134 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23411507-ae6e-438e-84e6-3505947e632b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.283210 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.283247 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.283278 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kn8d\" (UniqueName: \"kubernetes.io/projected/23411507-ae6e-438e-84e6-3505947e632b-kube-api-access-7kn8d\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.283300 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.283328 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-scripts\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.284108 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23411507-ae6e-438e-84e6-3505947e632b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.290317 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.291397 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.297910 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-scripts\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.309543 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-h476p"] Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.315754 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.325631 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kn8d\" (UniqueName: \"kubernetes.io/projected/23411507-ae6e-438e-84e6-3505947e632b-kube-api-access-7kn8d\") pod \"cinder-scheduler-0\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.384864 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llc7c\" (UniqueName: \"kubernetes.io/projected/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-kube-api-access-llc7c\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.384909 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-swift-storage-0\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.384983 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-nb\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.385018 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-svc\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.385060 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-sb\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.385082 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-config\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc 
kubenswrapper[4605]: I1001 14:03:18.449642 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.451356 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.458882 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.480838 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.487073 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-swift-storage-0\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.487333 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-nb\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.487433 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-svc\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.487531 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-sb\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.487614 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-config\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.487739 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llc7c\" (UniqueName: \"kubernetes.io/projected/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-kube-api-access-llc7c\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.489040 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-swift-storage-0\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.489807 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-nb\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.490340 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-sb\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.490804 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-svc\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.492513 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bkj69" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.499160 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.500461 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-config\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.550693 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llc7c\" (UniqueName: \"kubernetes.io/projected/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-kube-api-access-llc7c\") pod \"dnsmasq-dns-69c986f6d7-h476p\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.584620 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.594867 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-combined-ca-bundle\") pod \"f17ed625-db31-40c7-9958-f4b89f66ffa0\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.594956 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-config\") pod \"f17ed625-db31-40c7-9958-f4b89f66ffa0\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.595157 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bvjd\" (UniqueName: \"kubernetes.io/projected/f17ed625-db31-40c7-9958-f4b89f66ffa0-kube-api-access-9bvjd\") pod \"f17ed625-db31-40c7-9958-f4b89f66ffa0\" (UID: \"f17ed625-db31-40c7-9958-f4b89f66ffa0\") " Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.595368 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/899fd156-9323-4509-8caf-474844c688ef-logs\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.595407 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.595440 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data-custom\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.595473 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-scripts\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.595516 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.596001 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/899fd156-9323-4509-8caf-474844c688ef-etc-machine-id\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.596055 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcq5m\" (UniqueName: 
\"kubernetes.io/projected/899fd156-9323-4509-8caf-474844c688ef-kube-api-access-qcq5m\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.663664 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f17ed625-db31-40c7-9958-f4b89f66ffa0-kube-api-access-9bvjd" (OuterVolumeSpecName: "kube-api-access-9bvjd") pod "f17ed625-db31-40c7-9958-f4b89f66ffa0" (UID: "f17ed625-db31-40c7-9958-f4b89f66ffa0"). InnerVolumeSpecName "kube-api-access-9bvjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699538 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699589 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/899fd156-9323-4509-8caf-474844c688ef-etc-machine-id\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699638 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcq5m\" (UniqueName: \"kubernetes.io/projected/899fd156-9323-4509-8caf-474844c688ef-kube-api-access-qcq5m\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699694 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/899fd156-9323-4509-8caf-474844c688ef-logs\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699721 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699752 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data-custom\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699787 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-scripts\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.699836 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bvjd\" (UniqueName: \"kubernetes.io/projected/f17ed625-db31-40c7-9958-f4b89f66ffa0-kube-api-access-9bvjd\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.700177 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f17ed625-db31-40c7-9958-f4b89f66ffa0" (UID: "f17ed625-db31-40c7-9958-f4b89f66ffa0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.703219 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/899fd156-9323-4509-8caf-474844c688ef-etc-machine-id\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.704363 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/899fd156-9323-4509-8caf-474844c688ef-logs\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.714018 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.715535 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.718746 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-scripts\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.724016 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data-custom\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.744695 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-config" (OuterVolumeSpecName: "config") pod "f17ed625-db31-40c7-9958-f4b89f66ffa0" (UID: "f17ed625-db31-40c7-9958-f4b89f66ffa0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.745432 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcq5m\" (UniqueName: \"kubernetes.io/projected/899fd156-9323-4509-8caf-474844c688ef-kube-api-access-qcq5m\") pod \"cinder-api-0\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") " pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.801409 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.801428 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17ed625-db31-40c7-9958-f4b89f66ffa0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.821317 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.885921 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerName="dnsmasq-dns" containerID="cri-o://f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883" gracePeriod=10 Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.886583 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bkj69" Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.887038 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bkj69" event={"ID":"f17ed625-db31-40c7-9958-f4b89f66ffa0","Type":"ContainerDied","Data":"83e880bcf317d527091672e01a35577addd00f2cd9712104cddd065490b937e5"} Oct 01 14:03:18 crc kubenswrapper[4605]: I1001 14:03:18.887068 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83e880bcf317d527091672e01a35577addd00f2cd9712104cddd065490b937e5" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.188698 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-h476p"] Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.237813 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-8rr76"] Oct 01 14:03:19 crc kubenswrapper[4605]: E1001 14:03:19.238445 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17ed625-db31-40c7-9958-f4b89f66ffa0" containerName="neutron-db-sync" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.238460 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17ed625-db31-40c7-9958-f4b89f66ffa0" containerName="neutron-db-sync" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.238643 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17ed625-db31-40c7-9958-f4b89f66ffa0" containerName="neutron-db-sync" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.239922 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.265010 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-8rr76"] Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.335918 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-76d9c5f468-cdf7s"] Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.337569 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.343898 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-config\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.343973 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj4qd\" (UniqueName: \"kubernetes.io/projected/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-kube-api-access-dj4qd\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.344011 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.344108 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-svc\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.344138 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.344179 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.344786 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.345034 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5c4mb" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.345234 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 01 14:03:19 crc 
kubenswrapper[4605]: I1001 14:03:19.344805 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.351000 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76d9c5f468-cdf7s"] Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447010 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-config\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447076 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-httpd-config\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447124 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj4qd\" (UniqueName: \"kubernetes.io/projected/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-kube-api-access-dj4qd\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447158 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447203 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr2xv\" (UniqueName: \"kubernetes.io/projected/33bae256-76ac-47a8-b5d6-84df403fb294-kube-api-access-pr2xv\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447252 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-combined-ca-bundle\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447267 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-ovndb-tls-certs\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447287 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-svc\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447307 4605 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-config\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447327 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.447353 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.448175 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.448669 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-config\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.451364 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-svc\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.451392 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.456749 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.473373 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj4qd\" (UniqueName: \"kubernetes.io/projected/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-kube-api-access-dj4qd\") pod \"dnsmasq-dns-5784cf869f-8rr76\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.542108 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:19 crc kubenswrapper[4605]: 
I1001 14:03:19.551795 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr2xv\" (UniqueName: \"kubernetes.io/projected/33bae256-76ac-47a8-b5d6-84df403fb294-kube-api-access-pr2xv\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.552135 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-combined-ca-bundle\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.552152 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-ovndb-tls-certs\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.552179 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-config\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.552234 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-httpd-config\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.564957 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.571629 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-ovndb-tls-certs\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.577481 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-combined-ca-bundle\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.587900 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-config\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.595853 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-httpd-config\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.608136 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr2xv\" (UniqueName: \"kubernetes.io/projected/33bae256-76ac-47a8-b5d6-84df403fb294-kube-api-access-pr2xv\") pod \"neutron-76d9c5f468-cdf7s\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.675159 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.926066 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.935442 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.972657 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-nb\") pod \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.972757 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-sb\") pod \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.972879 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-swift-storage-0\") pod \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.972936 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-svc\") pod \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.973009 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-config\") pod \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.973049 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f992w\" (UniqueName: \"kubernetes.io/projected/f759e02e-90e1-4fa7-b0e5-d79e726d077c-kube-api-access-f992w\") pod \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\" (UID: \"f759e02e-90e1-4fa7-b0e5-d79e726d077c\") " Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.990020 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-h476p"] Oct 01 14:03:19 crc kubenswrapper[4605]: I1001 14:03:19.990057 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"23411507-ae6e-438e-84e6-3505947e632b","Type":"ContainerStarted","Data":"384fc5e265f5110d3e9682dccae50a7ccef2078bd67f945d346ec3ef69379934"} Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.027167 4605 generic.go:334] "Generic (PLEG): container finished" podID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerID="f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883" exitCode=0 Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.027226 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" event={"ID":"f759e02e-90e1-4fa7-b0e5-d79e726d077c","Type":"ContainerDied","Data":"f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883"} Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.027253 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" event={"ID":"f759e02e-90e1-4fa7-b0e5-d79e726d077c","Type":"ContainerDied","Data":"cb598ef07277a78d3ab4bfaa10cae42ffce108bb35298e5eeb931894adf0fbb1"} Oct 01 14:03:20 
crc kubenswrapper[4605]: I1001 14:03:20.027269 4605 scope.go:117] "RemoveContainer" containerID="f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.036749 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f759e02e-90e1-4fa7-b0e5-d79e726d077c-kube-api-access-f992w" (OuterVolumeSpecName: "kube-api-access-f992w") pod "f759e02e-90e1-4fa7-b0e5-d79e726d077c" (UID: "f759e02e-90e1-4fa7-b0e5-d79e726d077c"). InnerVolumeSpecName "kube-api-access-f992w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.078573 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f992w\" (UniqueName: \"kubernetes.io/projected/f759e02e-90e1-4fa7-b0e5-d79e726d077c-kube-api-access-f992w\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.127352 4605 scope.go:117] "RemoveContainer" containerID="3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.202674 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-config" (OuterVolumeSpecName: "config") pod "f759e02e-90e1-4fa7-b0e5-d79e726d077c" (UID: "f759e02e-90e1-4fa7-b0e5-d79e726d077c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.266486 4605 scope.go:117] "RemoveContainer" containerID="f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.289638 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f759e02e-90e1-4fa7-b0e5-d79e726d077c" (UID: "f759e02e-90e1-4fa7-b0e5-d79e726d077c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:20 crc kubenswrapper[4605]: E1001 14:03:20.289758 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883\": container with ID starting with f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883 not found: ID does not exist" containerID="f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.289795 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883"} err="failed to get container status \"f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883\": rpc error: code = NotFound desc = could not find container \"f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883\": container with ID starting with f67a4eb55a8524dd81d74d58905b8dee29bb3399f308c031f467921c58de6883 not found: ID does not exist" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.289817 4605 scope.go:117] "RemoveContainer" containerID="3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.290960 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.290996 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:20 crc kubenswrapper[4605]: E1001 14:03:20.299412 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9\": container with ID starting with 3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9 not found: ID does not exist" containerID="3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.299451 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9"} err="failed to get container status \"3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9\": rpc error: code = NotFound desc = could not find container \"3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9\": container with ID starting with 3115c78b273f3871b10c0ad240390f403b3c55939e7af8249390cd5e58e99aa9 not found: ID does not exist" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.366341 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f759e02e-90e1-4fa7-b0e5-d79e726d077c" (UID: "f759e02e-90e1-4fa7-b0e5-d79e726d077c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.370394 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-8rr76"] Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.395283 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.429616 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f759e02e-90e1-4fa7-b0e5-d79e726d077c" (UID: "f759e02e-90e1-4fa7-b0e5-d79e726d077c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.466706 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f759e02e-90e1-4fa7-b0e5-d79e726d077c" (UID: "f759e02e-90e1-4fa7-b0e5-d79e726d077c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.497765 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.497803 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f759e02e-90e1-4fa7-b0e5-d79e726d077c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:20 crc kubenswrapper[4605]: I1001 14:03:20.714028 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76d9c5f468-cdf7s"] Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.104917 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" event={"ID":"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9","Type":"ContainerStarted","Data":"9a49386be22896f5086f7bd846653306d67c5f0213e1bc36b2799166f2c1260a"} Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.118322 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d9c5f468-cdf7s" event={"ID":"33bae256-76ac-47a8-b5d6-84df403fb294","Type":"ContainerStarted","Data":"0c99f574224658edde3e2cfacf098b4c7cc1b9344c78981a2e9ea8eefca66518"} Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.130847 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"899fd156-9323-4509-8caf-474844c688ef","Type":"ContainerStarted","Data":"2bfd179030c91fe00463b9e99d2c50f19ed828c143d0eeef3873e65328cbddf5"} Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.135884 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" event={"ID":"ed331d86-1f29-4a49-a436-2e7f9c55a2d0","Type":"ContainerStarted","Data":"489e42d187559bd90aa03ddd16375449367c120a8eafbe582b68c138dbe9768c"} Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.137546 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-hvfr5" Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.203495 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-hvfr5"] Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.228889 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-hvfr5"] Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.631551 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.631614 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.915133 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:21 crc kubenswrapper[4605]: I1001 14:03:21.951422 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" path="/var/lib/kubelet/pods/f759e02e-90e1-4fa7-b0e5-d79e726d077c/volumes" Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.182152 4605 generic.go:334] "Generic (PLEG): container finished" podID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerID="bcbb6bf16b4baee0c0b23c4457ed083a5446b54045ff1072aa2bc1130eb7992b" exitCode=0 Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.182253 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" event={"ID":"ed331d86-1f29-4a49-a436-2e7f9c55a2d0","Type":"ContainerDied","Data":"bcbb6bf16b4baee0c0b23c4457ed083a5446b54045ff1072aa2bc1130eb7992b"} Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.201655 4605 generic.go:334] "Generic (PLEG): container finished" podID="fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" containerID="9c96fd73407bf5878ea33b612680e722f331059d8a320d57db2e992ead4dfe8b" exitCode=0 Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.201728 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" event={"ID":"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9","Type":"ContainerDied","Data":"9c96fd73407bf5878ea33b612680e722f331059d8a320d57db2e992ead4dfe8b"} Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.212000 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d9c5f468-cdf7s" event={"ID":"33bae256-76ac-47a8-b5d6-84df403fb294","Type":"ContainerStarted","Data":"9295dbb20a5defeafe207475f4bb052a4a5c0848cd78d13c0bc653ff9174bd58"} Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.220059 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"899fd156-9323-4509-8caf-474844c688ef","Type":"ContainerStarted","Data":"5a99040db5650682954b05430ab8444d4e20b31a51305ca8d505b07d5058ae13"} Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.460493 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.571476 4605 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-64d6df575b-5ctbf" Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.742558 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-749477d64b-5dpnm"] Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.750213 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.750508 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.966459 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.985127 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-config\") pod \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.985209 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-nb\") pod \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.985251 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llc7c\" (UniqueName: \"kubernetes.io/projected/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-kube-api-access-llc7c\") pod \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.985324 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-swift-storage-0\") pod \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.985351 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-svc\") pod \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " Oct 01 14:03:22 crc kubenswrapper[4605]: I1001 14:03:22.985433 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-sb\") pod \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\" (UID: \"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9\") " Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.011389 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-kube-api-access-llc7c" (OuterVolumeSpecName: "kube-api-access-llc7c") pod "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" (UID: "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9"). InnerVolumeSpecName "kube-api-access-llc7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.055395 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" (UID: "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.083892 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-config" (OuterVolumeSpecName: "config") pod "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" (UID: "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.084373 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" (UID: "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.088383 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.088409 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llc7c\" (UniqueName: \"kubernetes.io/projected/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-kube-api-access-llc7c\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.088419 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.088428 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.089355 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" (UID: "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.113616 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" (UID: "fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.194176 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.194422 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.262530 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"23411507-ae6e-438e-84e6-3505947e632b","Type":"ContainerStarted","Data":"96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b"} Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.264366 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" event={"ID":"fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9","Type":"ContainerDied","Data":"9a49386be22896f5086f7bd846653306d67c5f0213e1bc36b2799166f2c1260a"} Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.264419 4605 scope.go:117] "RemoveContainer" containerID="9c96fd73407bf5878ea33b612680e722f331059d8a320d57db2e992ead4dfe8b" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.264486 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69c986f6d7-h476p" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.269123 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d9c5f468-cdf7s" event={"ID":"33bae256-76ac-47a8-b5d6-84df403fb294","Type":"ContainerStarted","Data":"b08d83199c13552e355c429f0810c2d68f6be6bd4e1d171dd46d1f00abb8b248"} Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.269255 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.271219 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon-log" containerID="cri-o://6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf" gracePeriod=30 Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.271820 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" event={"ID":"ed331d86-1f29-4a49-a436-2e7f9c55a2d0","Type":"ContainerStarted","Data":"1f76b559baad8105afd4645aaa68235d885c9fefea55e231d8b64126ae9cf55c"} Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.271863 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.271905 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" containerID="cri-o://423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9" gracePeriod=30 Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.301536 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" podStartSLOduration=4.301516985 podStartE2EDuration="4.301516985s" podCreationTimestamp="2025-10-01 14:03:19 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:23.299258098 +0000 UTC m=+1126.043234306" watchObservedRunningTime="2025-10-01 14:03:23.301516985 +0000 UTC m=+1126.045493193" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.335807 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-76d9c5f468-cdf7s" podStartSLOduration=4.335792422 podStartE2EDuration="4.335792422s" podCreationTimestamp="2025-10-01 14:03:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:23.330347335 +0000 UTC m=+1126.074323543" watchObservedRunningTime="2025-10-01 14:03:23.335792422 +0000 UTC m=+1126.079768630" Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.400968 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-h476p"] Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.412163 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69c986f6d7-h476p"] Oct 01 14:03:23 crc kubenswrapper[4605]: I1001 14:03:23.939220 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" path="/var/lib/kubelet/pods/fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9/volumes" Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.331534 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"899fd156-9323-4509-8caf-474844c688ef","Type":"ContainerStarted","Data":"f6a6fdd0c06a1ec141cd568b1c8b3a5fa5b94345559421d279494f3b8aa70c96"} Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.331744 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api-log" containerID="cri-o://5a99040db5650682954b05430ab8444d4e20b31a51305ca8d505b07d5058ae13" gracePeriod=30 Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.332013 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.332264 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api" containerID="cri-o://f6a6fdd0c06a1ec141cd568b1c8b3a5fa5b94345559421d279494f3b8aa70c96" gracePeriod=30 Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.347663 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"23411507-ae6e-438e-84e6-3505947e632b","Type":"ContainerStarted","Data":"9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7"} Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.362491 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.362476411 podStartE2EDuration="6.362476411s" podCreationTimestamp="2025-10-01 14:03:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:24.358459989 +0000 UTC m=+1127.102436197" watchObservedRunningTime="2025-10-01 14:03:24.362476411 +0000 UTC m=+1127.106452619" Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.382864 4605 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.009918467 podStartE2EDuration="6.382843076s" podCreationTimestamp="2025-10-01 14:03:18 +0000 UTC" firstStartedPulling="2025-10-01 14:03:19.565438248 +0000 UTC m=+1122.309414456" lastFinishedPulling="2025-10-01 14:03:20.938362857 +0000 UTC m=+1123.682339065" observedRunningTime="2025-10-01 14:03:24.377917742 +0000 UTC m=+1127.121893950" watchObservedRunningTime="2025-10-01 14:03:24.382843076 +0000 UTC m=+1127.126819284" Oct 01 14:03:24 crc kubenswrapper[4605]: I1001 14:03:24.977792 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5cc6b8b7dd-f8khf" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.359493 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.363940 4605 generic.go:334] "Generic (PLEG): container finished" podID="899fd156-9323-4509-8caf-474844c688ef" containerID="5a99040db5650682954b05430ab8444d4e20b31a51305ca8d505b07d5058ae13" exitCode=143 Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.364007 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"899fd156-9323-4509-8caf-474844c688ef","Type":"ContainerDied","Data":"5a99040db5650682954b05430ab8444d4e20b31a51305ca8d505b07d5058ae13"} Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.639342 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-59b677b5cd-92trn" podUID="b92ac133-ded4-4276-a43a-7d9414d051ab" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.709643 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f85558977-q9rhp"] Oct 01 14:03:25 crc kubenswrapper[4605]: E1001 14:03:25.710251 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerName="init" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.710266 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerName="init" Oct 01 14:03:25 crc kubenswrapper[4605]: E1001 14:03:25.710284 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" containerName="init" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.710291 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" containerName="init" Oct 01 14:03:25 crc kubenswrapper[4605]: E1001 14:03:25.710323 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerName="dnsmasq-dns" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.710329 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerName="dnsmasq-dns" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.710503 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="fab464ae-0064-4b8d-b3f1-fdaf4a6f75d9" containerName="init" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.710525 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="f759e02e-90e1-4fa7-b0e5-d79e726d077c" containerName="dnsmasq-dns" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.720278 
4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.722857 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.741476 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.748970 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-ovndb-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.749038 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-combined-ca-bundle\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.749081 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psqgd\" (UniqueName: \"kubernetes.io/projected/67c3654e-3eed-4260-8864-3ab0334a32a0-kube-api-access-psqgd\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.752396 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-config\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.752610 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-public-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.752678 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-internal-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.752728 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-httpd-config\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.765581 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f85558977-q9rhp"] Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854240 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-ovndb-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854321 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-combined-ca-bundle\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854357 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psqgd\" (UniqueName: \"kubernetes.io/projected/67c3654e-3eed-4260-8864-3ab0334a32a0-kube-api-access-psqgd\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854408 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-config\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854477 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-public-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854513 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-internal-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.854542 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-httpd-config\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.865515 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-config\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.880630 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-ovndb-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.881192 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-public-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: 
\"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.882827 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-internal-tls-certs\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.893124 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-httpd-config\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.893847 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psqgd\" (UniqueName: \"kubernetes.io/projected/67c3654e-3eed-4260-8864-3ab0334a32a0-kube-api-access-psqgd\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:25 crc kubenswrapper[4605]: I1001 14:03:25.895578 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c3654e-3eed-4260-8864-3ab0334a32a0-combined-ca-bundle\") pod \"neutron-f85558977-q9rhp\" (UID: \"67c3654e-3eed-4260-8864-3ab0334a32a0\") " pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.055705 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.394805 4605 generic.go:334] "Generic (PLEG): container finished" podID="899fd156-9323-4509-8caf-474844c688ef" containerID="f6a6fdd0c06a1ec141cd568b1c8b3a5fa5b94345559421d279494f3b8aa70c96" exitCode=0 Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.394920 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"899fd156-9323-4509-8caf-474844c688ef","Type":"ContainerDied","Data":"f6a6fdd0c06a1ec141cd568b1c8b3a5fa5b94345559421d279494f3b8aa70c96"} Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.669369 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.677461 4605 util.go:48] "No ready sandbox for pod can be found. 
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.677461 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.781737 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data-custom\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.781839 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-scripts\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.781885 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/899fd156-9323-4509-8caf-474844c688ef-etc-machine-id\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.781974 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcq5m\" (UniqueName: \"kubernetes.io/projected/899fd156-9323-4509-8caf-474844c688ef-kube-api-access-qcq5m\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.782000 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-combined-ca-bundle\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.782037 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/899fd156-9323-4509-8caf-474844c688ef-logs\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.782147 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data\") pod \"899fd156-9323-4509-8caf-474844c688ef\" (UID: \"899fd156-9323-4509-8caf-474844c688ef\") "
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.782536 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/899fd156-9323-4509-8caf-474844c688ef-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.787414 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/899fd156-9323-4509-8caf-474844c688ef-logs" (OuterVolumeSpecName: "logs") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.802565 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/899fd156-9323-4509-8caf-474844c688ef-kube-api-access-qcq5m" (OuterVolumeSpecName: "kube-api-access-qcq5m") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "kube-api-access-qcq5m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.802840 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-scripts" (OuterVolumeSpecName: "scripts") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.813257 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.828779 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6964b49dc5-fgw45"
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.829675 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:60146->10.217.0.150:8443: read: connection reset by peer"
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.863137 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.883860 4605 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.883892 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.883906 4605 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/899fd156-9323-4509-8caf-474844c688ef-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.883916 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcq5m\" (UniqueName: \"kubernetes.io/projected/899fd156-9323-4509-8caf-474844c688ef-kube-api-access-qcq5m\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.883930 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:26 crc kubenswrapper[4605]: I1001 14:03:26.883942 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/899fd156-9323-4509-8caf-474844c688ef-logs\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.032006 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f85558977-q9rhp"]
Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.059313 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data" (OuterVolumeSpecName: "config-data") pod "899fd156-9323-4509-8caf-474844c688ef" (UID: "899fd156-9323-4509-8caf-474844c688ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.113683 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899fd156-9323-4509-8caf-474844c688ef-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.119923 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5cc6b8b7dd-f8khf" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.426885 4605 generic.go:334] "Generic (PLEG): container finished" podID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerID="423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9" exitCode=0 Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.427010 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749477d64b-5dpnm" event={"ID":"19af3ac5-8b1a-4301-88a2-96ae085ee9e0","Type":"ContainerDied","Data":"423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9"} Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.440963 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f85558977-q9rhp" event={"ID":"67c3654e-3eed-4260-8864-3ab0334a32a0","Type":"ContainerStarted","Data":"057c1dc47bee20c76466325990170d92df0602ead44c31931b4cb9c356a2cf34"} Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.441031 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f85558977-q9rhp" event={"ID":"67c3654e-3eed-4260-8864-3ab0334a32a0","Type":"ContainerStarted","Data":"40b866cf46d73e824d70fd43bb85523cfe515f452194f2f9c3b188ee26cd000c"} Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.473379 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"899fd156-9323-4509-8caf-474844c688ef","Type":"ContainerDied","Data":"2bfd179030c91fe00463b9e99d2c50f19ed828c143d0eeef3873e65328cbddf5"} Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.473448 4605 scope.go:117] "RemoveContainer" containerID="f6a6fdd0c06a1ec141cd568b1c8b3a5fa5b94345559421d279494f3b8aa70c96" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.473648 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.549983 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.564949 4605 scope.go:117] "RemoveContainer" containerID="5a99040db5650682954b05430ab8444d4e20b31a51305ca8d505b07d5058ae13" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.688133 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.719156 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:27 crc kubenswrapper[4605]: E1001 14:03:27.727693 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api-log" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.727711 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api-log" Oct 01 14:03:27 crc kubenswrapper[4605]: E1001 14:03:27.727740 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.727746 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.731835 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.731887 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="899fd156-9323-4509-8caf-474844c688ef" containerName="cinder-api-log" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.732851 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.736653 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.737428 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.736657 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.740757 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.796121 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835110 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-scripts\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835488 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-config-data\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835583 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835680 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6a6bf8ea-cc99-43f4-913c-59039f5e11db-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835797 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8thg\" (UniqueName: \"kubernetes.io/projected/6a6bf8ea-cc99-43f4-913c-59039f5e11db-kube-api-access-w8thg\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835876 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-config-data-custom\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.835986 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.836156 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.836252 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6bf8ea-cc99-43f4-913c-59039f5e11db-logs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.837284 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939063 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6bf8ea-cc99-43f4-913c-59039f5e11db-logs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939452 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-scripts\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939550 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-config-data\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939638 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939718 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6a6bf8ea-cc99-43f4-913c-59039f5e11db-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939827 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8thg\" (UniqueName: \"kubernetes.io/projected/6a6bf8ea-cc99-43f4-913c-59039f5e11db-kube-api-access-w8thg\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939905 4605 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-config-data-custom\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939999 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.940756 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.939501 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6bf8ea-cc99-43f4-913c-59039f5e11db-logs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.943381 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6a6bf8ea-cc99-43f4-913c-59039f5e11db-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.949953 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-scripts\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.950790 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-config-data\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.955603 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.960400 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="899fd156-9323-4509-8caf-474844c688ef" path="/var/lib/kubelet/pods/899fd156-9323-4509-8caf-474844c688ef/volumes" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.977641 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-config-data-custom\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:27 crc kubenswrapper[4605]: I1001 14:03:27.989727 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-combined-ca-bundle\") pod \"cinder-api-0\" (UID: 
\"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.005937 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6bf8ea-cc99-43f4-913c-59039f5e11db-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.006490 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8thg\" (UniqueName: \"kubernetes.io/projected/6a6bf8ea-cc99-43f4-913c-59039f5e11db-kube-api-access-w8thg\") pod \"cinder-api-0\" (UID: \"6a6bf8ea-cc99-43f4-913c-59039f5e11db\") " pod="openstack/cinder-api-0" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.074317 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.482189 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.485488 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.163:8080/\": dial tcp 10.217.0.163:8080: connect: connection refused" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.514687 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f85558977-q9rhp" event={"ID":"67c3654e-3eed-4260-8864-3ab0334a32a0","Type":"ContainerStarted","Data":"7bf62da2789582d691a7f8add5db9d7967c1d3648640709e38921fc1d37acbbb"} Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.515209 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.542367 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-f85558977-q9rhp" podStartSLOduration=3.542343688 podStartE2EDuration="3.542343688s" podCreationTimestamp="2025-10-01 14:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:28.531133594 +0000 UTC m=+1131.275109802" watchObservedRunningTime="2025-10-01 14:03:28.542343688 +0000 UTC m=+1131.286319896" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.648455 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64d6df575b-5ctbf" podUID="80fb1c51-bd86-4896-8dac-59747473f066" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.648721 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-59b677b5cd-92trn" podUID="b92ac133-ded4-4276-a43a-7d9414d051ab" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.648864 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/horizon-64d6df575b-5ctbf" podUID="80fb1c51-bd86-4896-8dac-59747473f066" 
containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:28 crc kubenswrapper[4605]: W1001 14:03:28.685320 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a6bf8ea_cc99_43f4_913c_59039f5e11db.slice/crio-8517b37f98da6e830aeeb73b6b5e6af25624a878d445074238ec46ff5620b6f8 WatchSource:0}: Error finding container 8517b37f98da6e830aeeb73b6b5e6af25624a878d445074238ec46ff5620b6f8: Status 404 returned error can't find the container with id 8517b37f98da6e830aeeb73b6b5e6af25624a878d445074238ec46ff5620b6f8 Oct 01 14:03:28 crc kubenswrapper[4605]: I1001 14:03:28.687063 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 01 14:03:29 crc kubenswrapper[4605]: I1001 14:03:29.534308 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6a6bf8ea-cc99-43f4-913c-59039f5e11db","Type":"ContainerStarted","Data":"8517b37f98da6e830aeeb73b6b5e6af25624a878d445074238ec46ff5620b6f8"} Oct 01 14:03:29 crc kubenswrapper[4605]: I1001 14:03:29.567268 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:03:29 crc kubenswrapper[4605]: I1001 14:03:29.627626 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-59b677b5cd-92trn" podUID="b92ac133-ded4-4276-a43a-7d9414d051ab" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:29 crc kubenswrapper[4605]: I1001 14:03:29.627839 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-59b677b5cd-92trn" podUID="b92ac133-ded4-4276-a43a-7d9414d051ab" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:29 crc kubenswrapper[4605]: I1001 14:03:29.637949 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-2dzk7"] Oct 01 14:03:29 crc kubenswrapper[4605]: I1001 14:03:29.641364 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerName="dnsmasq-dns" containerID="cri-o://6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4" gracePeriod=10 Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.578321 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.579238 4605 generic.go:334] "Generic (PLEG): container finished" podID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerID="6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4" exitCode=0 Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.579300 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" event={"ID":"693b90b6-22ae-41d7-8a93-3d854bc9fd81","Type":"ContainerDied","Data":"6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4"} Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.579322 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" event={"ID":"693b90b6-22ae-41d7-8a93-3d854bc9fd81","Type":"ContainerDied","Data":"c20f0c1cf41e68b2081bfac252beb5f88a6fadcc45167a93748781e59ea83056"} Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.579339 4605 scope.go:117] "RemoveContainer" containerID="6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.609261 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6a6bf8ea-cc99-43f4-913c-59039f5e11db","Type":"ContainerStarted","Data":"a126aa6ec6d8d88545d5f64d0b2fadf24995fdb5c8388e5f45c8a5e54638fa90"} Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.623932 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-558ff4bf9d-h49gp" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.639072 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-swift-storage-0\") pod \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.639194 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-svc\") pod \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.639222 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-config\") pod \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.639265 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-sb\") pod \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.639297 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdxz2\" (UniqueName: \"kubernetes.io/projected/693b90b6-22ae-41d7-8a93-3d854bc9fd81-kube-api-access-vdxz2\") pod \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.639358 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-nb\") pod \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\" (UID: \"693b90b6-22ae-41d7-8a93-3d854bc9fd81\") " Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.641140 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-59b677b5cd-92trn" podUID="b92ac133-ded4-4276-a43a-7d9414d051ab" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.660474 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/693b90b6-22ae-41d7-8a93-3d854bc9fd81-kube-api-access-vdxz2" (OuterVolumeSpecName: "kube-api-access-vdxz2") pod "693b90b6-22ae-41d7-8a93-3d854bc9fd81" (UID: "693b90b6-22ae-41d7-8a93-3d854bc9fd81"). InnerVolumeSpecName "kube-api-access-vdxz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.678944 4605 scope.go:117] "RemoveContainer" containerID="d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.745888 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdxz2\" (UniqueName: \"kubernetes.io/projected/693b90b6-22ae-41d7-8a93-3d854bc9fd81-kube-api-access-vdxz2\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.766742 4605 scope.go:117] "RemoveContainer" containerID="6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4" Oct 01 14:03:30 crc kubenswrapper[4605]: E1001 14:03:30.771366 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4\": container with ID starting with 6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4 not found: ID does not exist" containerID="6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.771429 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4"} err="failed to get container status \"6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4\": rpc error: code = NotFound desc = could not find container \"6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4\": container with ID starting with 6803302be87a5e09d672a89130ed5394675d83c8eb6cc499b9768f39e93d61c4 not found: ID does not exist" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.771454 4605 scope.go:117] "RemoveContainer" containerID="d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb" Oct 01 14:03:30 crc kubenswrapper[4605]: E1001 14:03:30.776507 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb\": container with ID starting with d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb not found: ID does not exist" containerID="d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.776563 4605 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb"} err="failed to get container status \"d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb\": rpc error: code = NotFound desc = could not find container \"d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb\": container with ID starting with d9b3f5d9a48843e08744e4608803f148ce1f280d91c0d3559796eae3b8c0c3cb not found: ID does not exist" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.800582 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "693b90b6-22ae-41d7-8a93-3d854bc9fd81" (UID: "693b90b6-22ae-41d7-8a93-3d854bc9fd81"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.825124 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "693b90b6-22ae-41d7-8a93-3d854bc9fd81" (UID: "693b90b6-22ae-41d7-8a93-3d854bc9fd81"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.830450 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "693b90b6-22ae-41d7-8a93-3d854bc9fd81" (UID: "693b90b6-22ae-41d7-8a93-3d854bc9fd81"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.834810 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "693b90b6-22ae-41d7-8a93-3d854bc9fd81" (UID: "693b90b6-22ae-41d7-8a93-3d854bc9fd81"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.847257 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.847284 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.847294 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.847304 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.848812 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-config" (OuterVolumeSpecName: "config") pod "693b90b6-22ae-41d7-8a93-3d854bc9fd81" (UID: "693b90b6-22ae-41d7-8a93-3d854bc9fd81"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:30 crc kubenswrapper[4605]: I1001 14:03:30.949788 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/693b90b6-22ae-41d7-8a93-3d854bc9fd81-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.285286 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 01 14:03:31 crc kubenswrapper[4605]: E1001 14:03:31.285628 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerName="dnsmasq-dns" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.285641 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerName="dnsmasq-dns" Oct 01 14:03:31 crc kubenswrapper[4605]: E1001 14:03:31.285666 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerName="init" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.285672 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerName="init" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.285837 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" containerName="dnsmasq-dns" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.286410 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.288850 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-c68lz" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.289876 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.298617 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.298941 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.459019 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58264b9f-ddeb-466d-94aa-536c1a381308-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.459122 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdb6d\" (UniqueName: \"kubernetes.io/projected/58264b9f-ddeb-466d-94aa-536c1a381308-kube-api-access-zdb6d\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.459151 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58264b9f-ddeb-466d-94aa-536c1a381308-openstack-config\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.459183 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58264b9f-ddeb-466d-94aa-536c1a381308-openstack-config-secret\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.561412 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58264b9f-ddeb-466d-94aa-536c1a381308-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.562462 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdb6d\" (UniqueName: \"kubernetes.io/projected/58264b9f-ddeb-466d-94aa-536c1a381308-kube-api-access-zdb6d\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.562492 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58264b9f-ddeb-466d-94aa-536c1a381308-openstack-config\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.562522 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58264b9f-ddeb-466d-94aa-536c1a381308-openstack-config-secret\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.563306 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/58264b9f-ddeb-466d-94aa-536c1a381308-openstack-config\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.567004 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58264b9f-ddeb-466d-94aa-536c1a381308-combined-ca-bundle\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.569080 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/58264b9f-ddeb-466d-94aa-536c1a381308-openstack-config-secret\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.580951 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdb6d\" (UniqueName: \"kubernetes.io/projected/58264b9f-ddeb-466d-94aa-536c1a381308-kube-api-access-zdb6d\") pod \"openstackclient\" (UID: \"58264b9f-ddeb-466d-94aa-536c1a381308\") " pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.606907 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.651708 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-2dzk7" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.670296 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.689861 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6a6bf8ea-cc99-43f4-913c-59039f5e11db","Type":"ContainerStarted","Data":"1fa2ef8e5ec90515e1b3968f13c839b9a61149bdfe07163297d04745b1bc54f9"} Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.690244 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.730770 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.730751517 podStartE2EDuration="4.730751517s" podCreationTimestamp="2025-10-01 14:03:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:31.71745365 +0000 UTC m=+1134.461429858" watchObservedRunningTime="2025-10-01 14:03:31.730751517 +0000 UTC m=+1134.474727725" Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.760527 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-2dzk7"] Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.811579 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-2dzk7"] Oct 01 14:03:31 crc kubenswrapper[4605]: I1001 14:03:31.950021 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="693b90b6-22ae-41d7-8a93-3d854bc9fd81" path="/var/lib/kubelet/pods/693b90b6-22ae-41d7-8a93-3d854bc9fd81/volumes" Oct 01 14:03:32 crc kubenswrapper[4605]: I1001 14:03:32.275414 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 01 14:03:32 crc kubenswrapper[4605]: I1001 14:03:32.698346 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"58264b9f-ddeb-466d-94aa-536c1a381308","Type":"ContainerStarted","Data":"fb4e6b6aa4c559627c202d556b83919e2760d38fa610c2a60724f456e5e6e22e"} Oct 01 14:03:32 crc kubenswrapper[4605]: I1001 14:03:32.722625 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.306163 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-59b677b5cd-92trn" Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.371456 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-558ff4bf9d-h49gp"] Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.371694 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" containerID="cri-o://61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021" gracePeriod=30 Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.371828 4605 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" containerID="cri-o://55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95" gracePeriod=30 Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.528352 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.731173 4605 generic.go:334] "Generic (PLEG): container finished" podID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerID="61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021" exitCode=143 Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.731463 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-558ff4bf9d-h49gp" event={"ID":"1c70fd20-00d7-4bbd-9f5d-67b674a92e84","Type":"ContainerDied","Data":"61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021"} Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.808590 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 01 14:03:33 crc kubenswrapper[4605]: I1001 14:03:33.844397 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:34 crc kubenswrapper[4605]: I1001 14:03:34.746370 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="cinder-scheduler" containerID="cri-o://96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b" gracePeriod=30 Oct 01 14:03:34 crc kubenswrapper[4605]: I1001 14:03:34.746679 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="probe" containerID="cri-o://9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7" gracePeriod=30 Oct 01 14:03:35 crc kubenswrapper[4605]: I1001 14:03:35.756832 4605 generic.go:334] "Generic (PLEG): container finished" podID="23411507-ae6e-438e-84e6-3505947e632b" containerID="9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7" exitCode=0 Oct 01 14:03:35 crc kubenswrapper[4605]: I1001 14:03:35.756900 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"23411507-ae6e-438e-84e6-3505947e632b","Type":"ContainerDied","Data":"9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7"} Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.196802 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.691001 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.764654 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="sg-core" containerID="cri-o://9071895aa65ccbe403e7e2bf405c882b4c217f9992dc5831303042425173982a" gracePeriod=30 Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.764666 4605 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/ceilometer-0" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-notification-agent" containerID="cri-o://d764c8bddc16be2af1844bea8e0e18cf2a336f52a5afde15acccbe2700912180" gracePeriod=30 Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.764667 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="proxy-httpd" containerID="cri-o://a7165cd54ef4832d478f584e30401a0459bd23248c5f67491ba5551bbb78b95d" gracePeriod=30 Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.764843 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-central-agent" containerID="cri-o://ec311de54a1fe907b20987b92ee0d73cf7dd1c8ba41b14d2e6aa6f047f0871a6" gracePeriod=30 Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.872169 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:50196->10.217.0.161:9311: read: connection reset by peer" Oct 01 14:03:36 crc kubenswrapper[4605]: I1001 14:03:36.872193 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-558ff4bf9d-h49gp" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:50198->10.217.0.161:9311: read: connection reset by peer" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.399817 4605 util.go:48] "No ready sandbox for pod can be found. 
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.399817 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-558ff4bf9d-h49gp"
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.532285 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-logs\") pod \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") "
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.532338 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cttzr\" (UniqueName: \"kubernetes.io/projected/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-kube-api-access-cttzr\") pod \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") "
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.532380 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-combined-ca-bundle\") pod \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") "
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.532457 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data-custom\") pod \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") "
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.532506 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data\") pod \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\" (UID: \"1c70fd20-00d7-4bbd-9f5d-67b674a92e84\") "
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.533726 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-logs" (OuterVolumeSpecName: "logs") pod "1c70fd20-00d7-4bbd-9f5d-67b674a92e84" (UID: "1c70fd20-00d7-4bbd-9f5d-67b674a92e84"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.549364 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-kube-api-access-cttzr" (OuterVolumeSpecName: "kube-api-access-cttzr") pod "1c70fd20-00d7-4bbd-9f5d-67b674a92e84" (UID: "1c70fd20-00d7-4bbd-9f5d-67b674a92e84"). InnerVolumeSpecName "kube-api-access-cttzr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.566580 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1c70fd20-00d7-4bbd-9f5d-67b674a92e84" (UID: "1c70fd20-00d7-4bbd-9f5d-67b674a92e84"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.577255 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c70fd20-00d7-4bbd-9f5d-67b674a92e84" (UID: "1c70fd20-00d7-4bbd-9f5d-67b674a92e84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.647929 4605 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.647970 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-logs\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.647979 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cttzr\" (UniqueName: \"kubernetes.io/projected/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-kube-api-access-cttzr\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.647989 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.666161 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data" (OuterVolumeSpecName: "config-data") pod "1c70fd20-00d7-4bbd-9f5d-67b674a92e84" (UID: "1c70fd20-00d7-4bbd-9f5d-67b674a92e84"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.711448 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.752393 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c70fd20-00d7-4bbd-9f5d-67b674a92e84-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.779647 4605 generic.go:334] "Generic (PLEG): container finished" podID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerID="55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95" exitCode=0
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.779718 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-558ff4bf9d-h49gp" event={"ID":"1c70fd20-00d7-4bbd-9f5d-67b674a92e84","Type":"ContainerDied","Data":"55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95"}
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.779750 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-558ff4bf9d-h49gp" event={"ID":"1c70fd20-00d7-4bbd-9f5d-67b674a92e84","Type":"ContainerDied","Data":"1f32c21d82de8cbd74e4a04032fbfbfc1bcbbbe642a994499875cf7213a578ee"}
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.779770 4605 scope.go:117] "RemoveContainer" containerID="55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95"
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.779931 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-558ff4bf9d-h49gp"
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.806821 4605 generic.go:334] "Generic (PLEG): container finished" podID="45659557-27c7-4a59-afbf-27c09718d6f7" containerID="a7165cd54ef4832d478f584e30401a0459bd23248c5f67491ba5551bbb78b95d" exitCode=0
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.806853 4605 generic.go:334] "Generic (PLEG): container finished" podID="45659557-27c7-4a59-afbf-27c09718d6f7" containerID="9071895aa65ccbe403e7e2bf405c882b4c217f9992dc5831303042425173982a" exitCode=2
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.806861 4605 generic.go:334] "Generic (PLEG): container finished" podID="45659557-27c7-4a59-afbf-27c09718d6f7" containerID="ec311de54a1fe907b20987b92ee0d73cf7dd1c8ba41b14d2e6aa6f047f0871a6" exitCode=0
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.806920 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerDied","Data":"a7165cd54ef4832d478f584e30401a0459bd23248c5f67491ba5551bbb78b95d"}
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.806944 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerDied","Data":"9071895aa65ccbe403e7e2bf405c882b4c217f9992dc5831303042425173982a"}
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.806954 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerDied","Data":"ec311de54a1fe907b20987b92ee0d73cf7dd1c8ba41b14d2e6aa6f047f0871a6"}
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.822549 4605 generic.go:334] "Generic (PLEG): container finished" podID="23411507-ae6e-438e-84e6-3505947e632b" containerID="96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b" exitCode=0
Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.822779 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.822783 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"23411507-ae6e-438e-84e6-3505947e632b","Type":"ContainerDied","Data":"96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b"} Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.823588 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"23411507-ae6e-438e-84e6-3505947e632b","Type":"ContainerDied","Data":"384fc5e265f5110d3e9682dccae50a7ccef2078bd67f945d346ec3ef69379934"} Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.833173 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-558ff4bf9d-h49gp"] Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.835648 4605 scope.go:117] "RemoveContainer" containerID="61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.838197 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-558ff4bf9d-h49gp"] Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.853300 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-scripts\") pod \"23411507-ae6e-438e-84e6-3505947e632b\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.853400 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-combined-ca-bundle\") pod \"23411507-ae6e-438e-84e6-3505947e632b\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.853517 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data-custom\") pod \"23411507-ae6e-438e-84e6-3505947e632b\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.853541 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23411507-ae6e-438e-84e6-3505947e632b-etc-machine-id\") pod \"23411507-ae6e-438e-84e6-3505947e632b\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.853564 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data\") pod \"23411507-ae6e-438e-84e6-3505947e632b\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.853585 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kn8d\" (UniqueName: \"kubernetes.io/projected/23411507-ae6e-438e-84e6-3505947e632b-kube-api-access-7kn8d\") pod \"23411507-ae6e-438e-84e6-3505947e632b\" (UID: \"23411507-ae6e-438e-84e6-3505947e632b\") " Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.854465 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23411507-ae6e-438e-84e6-3505947e632b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") 
pod "23411507-ae6e-438e-84e6-3505947e632b" (UID: "23411507-ae6e-438e-84e6-3505947e632b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.871079 4605 scope.go:117] "RemoveContainer" containerID="55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95" Oct 01 14:03:37 crc kubenswrapper[4605]: E1001 14:03:37.871872 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95\": container with ID starting with 55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95 not found: ID does not exist" containerID="55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.871958 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95"} err="failed to get container status \"55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95\": rpc error: code = NotFound desc = could not find container \"55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95\": container with ID starting with 55f7603b22bd09a377822f92e6b22c4bd608fe8fa42097306988fd22c8d52d95 not found: ID does not exist" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.872056 4605 scope.go:117] "RemoveContainer" containerID="61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021" Oct 01 14:03:37 crc kubenswrapper[4605]: E1001 14:03:37.872335 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021\": container with ID starting with 61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021 not found: ID does not exist" containerID="61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.872452 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021"} err="failed to get container status \"61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021\": rpc error: code = NotFound desc = could not find container \"61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021\": container with ID starting with 61da8199bcc9a0bdcbbf44a1d6207513c89c9d2974c0410fc8db3ad2767b1021 not found: ID does not exist" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.872754 4605 scope.go:117] "RemoveContainer" containerID="9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.878073 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-scripts" (OuterVolumeSpecName: "scripts") pod "23411507-ae6e-438e-84e6-3505947e632b" (UID: "23411507-ae6e-438e-84e6-3505947e632b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.892471 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "23411507-ae6e-438e-84e6-3505947e632b" (UID: "23411507-ae6e-438e-84e6-3505947e632b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.919423 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23411507-ae6e-438e-84e6-3505947e632b-kube-api-access-7kn8d" (OuterVolumeSpecName: "kube-api-access-7kn8d") pod "23411507-ae6e-438e-84e6-3505947e632b" (UID: "23411507-ae6e-438e-84e6-3505947e632b"). InnerVolumeSpecName "kube-api-access-7kn8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.949547 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23411507-ae6e-438e-84e6-3505947e632b" (UID: "23411507-ae6e-438e-84e6-3505947e632b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.958060 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" path="/var/lib/kubelet/pods/1c70fd20-00d7-4bbd-9f5d-67b674a92e84/volumes" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.958122 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kn8d\" (UniqueName: \"kubernetes.io/projected/23411507-ae6e-438e-84e6-3505947e632b-kube-api-access-7kn8d\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.959199 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.959211 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.959220 4605 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:37 crc kubenswrapper[4605]: I1001 14:03:37.959250 4605 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/23411507-ae6e-438e-84e6-3505947e632b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.042155 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data" (OuterVolumeSpecName: "config-data") pod "23411507-ae6e-438e-84e6-3505947e632b" (UID: "23411507-ae6e-438e-84e6-3505947e632b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.049711 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-66b5967899-cv4c4"] Oct 01 14:03:38 crc kubenswrapper[4605]: E1001 14:03:38.050061 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="cinder-scheduler" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050077 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="cinder-scheduler" Oct 01 14:03:38 crc kubenswrapper[4605]: E1001 14:03:38.050117 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050123 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" Oct 01 14:03:38 crc kubenswrapper[4605]: E1001 14:03:38.050137 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="probe" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050144 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="probe" Oct 01 14:03:38 crc kubenswrapper[4605]: E1001 14:03:38.050160 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050166 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050344 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api-log" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050355 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="cinder-scheduler" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050365 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="23411507-ae6e-438e-84e6-3505947e632b" containerName="probe" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.050378 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c70fd20-00d7-4bbd-9f5d-67b674a92e84" containerName="barbican-api" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.054177 4605 scope.go:117] "RemoveContainer" containerID="96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.059256 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.061399 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23411507-ae6e-438e-84e6-3505947e632b-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.064058 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.066741 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.066803 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.072244 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-66b5967899-cv4c4"] Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.138427 4605 scope.go:117] "RemoveContainer" containerID="9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7" Oct 01 14:03:38 crc kubenswrapper[4605]: E1001 14:03:38.144416 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7\": container with ID starting with 9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7 not found: ID does not exist" containerID="9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.144451 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7"} err="failed to get container status \"9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7\": rpc error: code = NotFound desc = could not find container \"9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7\": container with ID starting with 9a1f4c13caf7d4f48175afeb8a6cbcedd0022b8baeacde596ed624b53c65a6a7 not found: ID does not exist" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.144473 4605 scope.go:117] "RemoveContainer" containerID="96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b" Oct 01 14:03:38 crc kubenswrapper[4605]: E1001 14:03:38.145588 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b\": container with ID starting with 96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b not found: ID does not exist" containerID="96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.145621 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b"} err="failed to get container status \"96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b\": rpc error: code = NotFound desc = could not find container \"96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b\": container with ID starting with 96e5f5adf67b8bfa82b4f32208b1182d7d6193535d085d70b443c588f528959b not found: ID does not exist" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164506 4605 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-config-data\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164563 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdfpv\" (UniqueName: \"kubernetes.io/projected/7ae58440-10a5-44a6-94e8-89d112c67651-kube-api-access-vdfpv\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164669 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-public-tls-certs\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164690 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ae58440-10a5-44a6-94e8-89d112c67651-run-httpd\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164709 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-combined-ca-bundle\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164725 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-internal-tls-certs\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164743 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ae58440-10a5-44a6-94e8-89d112c67651-log-httpd\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.164758 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7ae58440-10a5-44a6-94e8-89d112c67651-etc-swift\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.168627 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.176820 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.195048 
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.196612 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.203828 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.217959 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267186 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267247 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56fw7\" (UniqueName: \"kubernetes.io/projected/675a2ec1-82ad-4b20-a077-d8d427108ce7-kube-api-access-56fw7\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267303 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267330 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-public-tls-certs\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267352 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ae58440-10a5-44a6-94e8-89d112c67651-run-httpd\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267370 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/675a2ec1-82ad-4b20-a077-d8d427108ce7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267386 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-combined-ca-bundle\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267402 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-internal-tls-certs\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267421 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ae58440-10a5-44a6-94e8-89d112c67651-log-httpd\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267437 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7ae58440-10a5-44a6-94e8-89d112c67651-etc-swift\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267475 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-config-data\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267499 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdfpv\" (UniqueName: \"kubernetes.io/projected/7ae58440-10a5-44a6-94e8-89d112c67651-kube-api-access-vdfpv\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267517 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-config-data\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.267566 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-scripts\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.268539 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ae58440-10a5-44a6-94e8-89d112c67651-run-httpd\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.268568 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ae58440-10a5-44a6-94e8-89d112c67651-log-httpd\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.274587 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-config-data\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.275135 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-internal-tls-certs\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.280790 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-combined-ca-bundle\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.281001 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae58440-10a5-44a6-94e8-89d112c67651-public-tls-certs\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.281282 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7ae58440-10a5-44a6-94e8-89d112c67651-etc-swift\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.287799 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdfpv\" (UniqueName: \"kubernetes.io/projected/7ae58440-10a5-44a6-94e8-89d112c67651-kube-api-access-vdfpv\") pod \"swift-proxy-66b5967899-cv4c4\" (UID: \"7ae58440-10a5-44a6-94e8-89d112c67651\") " pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.368984 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-scripts\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.369031 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.369063 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56fw7\" (UniqueName: \"kubernetes.io/projected/675a2ec1-82ad-4b20-a077-d8d427108ce7-kube-api-access-56fw7\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.369139 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.369169 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/675a2ec1-82ad-4b20-a077-d8d427108ce7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.369231 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-config-data\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.370748 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/675a2ec1-82ad-4b20-a077-d8d427108ce7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.374625 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-scripts\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.375779 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.376215 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.383306 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/675a2ec1-82ad-4b20-a077-d8d427108ce7-config-data\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.391073 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56fw7\" (UniqueName: \"kubernetes.io/projected/675a2ec1-82ad-4b20-a077-d8d427108ce7-kube-api-access-56fw7\") pod \"cinder-scheduler-0\" (UID: \"675a2ec1-82ad-4b20-a077-d8d427108ce7\") " pod="openstack/cinder-scheduler-0"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.434442 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-66b5967899-cv4c4"
Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.511725 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.857909 4605 generic.go:334] "Generic (PLEG): container finished" podID="45659557-27c7-4a59-afbf-27c09718d6f7" containerID="d764c8bddc16be2af1844bea8e0e18cf2a336f52a5afde15acccbe2700912180" exitCode=0 Oct 01 14:03:38 crc kubenswrapper[4605]: I1001 14:03:38.858193 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerDied","Data":"d764c8bddc16be2af1844bea8e0e18cf2a336f52a5afde15acccbe2700912180"} Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.091988 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-66b5967899-cv4c4"] Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.176303 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.366162 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506019 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-run-httpd\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506077 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bd76\" (UniqueName: \"kubernetes.io/projected/45659557-27c7-4a59-afbf-27c09718d6f7-kube-api-access-5bd76\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506146 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-config-data\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506259 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-sg-core-conf-yaml\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506311 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-combined-ca-bundle\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506344 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-log-httpd\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: \"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.506370 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-scripts\") pod \"45659557-27c7-4a59-afbf-27c09718d6f7\" (UID: 
\"45659557-27c7-4a59-afbf-27c09718d6f7\") " Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.510694 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.511110 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.516226 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-scripts" (OuterVolumeSpecName: "scripts") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.523547 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45659557-27c7-4a59-afbf-27c09718d6f7-kube-api-access-5bd76" (OuterVolumeSpecName: "kube-api-access-5bd76") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "kube-api-access-5bd76". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.552258 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.608651 4605 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.608682 4605 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.608691 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.608700 4605 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45659557-27c7-4a59-afbf-27c09718d6f7-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.608708 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bd76\" (UniqueName: \"kubernetes.io/projected/45659557-27c7-4a59-afbf-27c09718d6f7-kube-api-access-5bd76\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.665784 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.710325 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.725484 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-config-data" (OuterVolumeSpecName: "config-data") pod "45659557-27c7-4a59-afbf-27c09718d6f7" (UID: "45659557-27c7-4a59-afbf-27c09718d6f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.811962 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45659557-27c7-4a59-afbf-27c09718d6f7-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.894477 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66b5967899-cv4c4" event={"ID":"7ae58440-10a5-44a6-94e8-89d112c67651","Type":"ContainerStarted","Data":"f26c532384b91c2f4868466770eb0da9987992617ad489b191382f46e30bc1a2"} Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.894515 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66b5967899-cv4c4" event={"ID":"7ae58440-10a5-44a6-94e8-89d112c67651","Type":"ContainerStarted","Data":"ee89a53c0d85a8fef1bf6adba9ea7d6a1d096a17a51398184fb963c36d0f79f7"} Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.910439 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.910439 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45659557-27c7-4a59-afbf-27c09718d6f7","Type":"ContainerDied","Data":"6787e3caa32fcfa70524cd6baae05e92da1a75862c82dc3bebb87e5d2ff70e52"} Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.910599 4605 scope.go:117] "RemoveContainer" containerID="a7165cd54ef4832d478f584e30401a0459bd23248c5f67491ba5551bbb78b95d" Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.913859 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"675a2ec1-82ad-4b20-a077-d8d427108ce7","Type":"ContainerStarted","Data":"373988cd33c872a46a36539a0869663ae8a63c448901feb7c90f3ffaf369a399"} Oct 01 14:03:39 crc kubenswrapper[4605]: I1001 14:03:39.943243 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23411507-ae6e-438e-84e6-3505947e632b" path="/var/lib/kubelet/pods/23411507-ae6e-438e-84e6-3505947e632b/volumes" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.033158 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.040160 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052280 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:40 crc kubenswrapper[4605]: E1001 14:03:40.052630 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-central-agent" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052646 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-central-agent" Oct 01 14:03:40 crc kubenswrapper[4605]: E1001 14:03:40.052665 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="proxy-httpd" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052671 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="proxy-httpd" Oct 01 14:03:40 crc kubenswrapper[4605]: E1001 14:03:40.052697 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="sg-core" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052703 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="sg-core" Oct 01 14:03:40 crc kubenswrapper[4605]: E1001 14:03:40.052711 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-notification-agent" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052717 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-notification-agent" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052872 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="proxy-httpd" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052884 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-notification-agent" Oct 
01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052896 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="sg-core" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.052911 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" containerName="ceilometer-central-agent" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.055161 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.059588 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.060031 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.069629 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.118324 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-log-httpd\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.118686 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-scripts\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.118811 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-config-data\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.118936 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.119025 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9rhk\" (UniqueName: \"kubernetes.io/projected/c601edbd-d09b-48b1-bc0b-40c01b76468c-kube-api-access-z9rhk\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.119129 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.119225 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-run-httpd\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.220359 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.220835 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9rhk\" (UniqueName: \"kubernetes.io/projected/c601edbd-d09b-48b1-bc0b-40c01b76468c-kube-api-access-z9rhk\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.220942 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.221149 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-run-httpd\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.221264 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-log-httpd\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.221339 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-scripts\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.221831 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-log-httpd\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.221992 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-config-data\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.222000 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-run-httpd\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.224638 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.225655 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.228009 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-config-data\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.228509 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-scripts\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.240782 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9rhk\" (UniqueName: \"kubernetes.io/projected/c601edbd-d09b-48b1-bc0b-40c01b76468c-kube-api-access-z9rhk\") pod \"ceilometer-0\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") " pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.386972 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:03:40 crc kubenswrapper[4605]: I1001 14:03:40.927263 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"675a2ec1-82ad-4b20-a077-d8d427108ce7","Type":"ContainerStarted","Data":"b4064bd06820b224f3d4f7ff9ed0add3df915f130fad2daa354dea2f3a8648d9"} Oct 01 14:03:41 crc kubenswrapper[4605]: I1001 14:03:41.408927 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 01 14:03:41 crc kubenswrapper[4605]: I1001 14:03:41.942195 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45659557-27c7-4a59-afbf-27c09718d6f7" path="/var/lib/kubelet/pods/45659557-27c7-4a59-afbf-27c09718d6f7/volumes" Oct 01 14:03:43 crc kubenswrapper[4605]: I1001 14:03:43.525786 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:43 crc kubenswrapper[4605]: I1001 14:03:43.528007 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 01 14:03:43 crc kubenswrapper[4605]: I1001 14:03:43.528120 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:03:47 crc kubenswrapper[4605]: I1001 14:03:47.611646 4605 scope.go:117] "RemoveContainer" containerID="9071895aa65ccbe403e7e2bf405c882b4c217f9992dc5831303042425173982a" Oct 01 14:03:47 crc kubenswrapper[4605]: I1001 14:03:47.799660 4605 scope.go:117] "RemoveContainer" containerID="d764c8bddc16be2af1844bea8e0e18cf2a336f52a5afde15acccbe2700912180" Oct 01 14:03:47 crc 
kubenswrapper[4605]: I1001 14:03:47.932281 4605 scope.go:117] "RemoveContainer" containerID="ec311de54a1fe907b20987b92ee0d73cf7dd1c8ba41b14d2e6aa6f047f0871a6" Oct 01 14:03:48 crc kubenswrapper[4605]: I1001 14:03:48.061511 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66b5967899-cv4c4" event={"ID":"7ae58440-10a5-44a6-94e8-89d112c67651","Type":"ContainerStarted","Data":"30128fe6162c7e88fcbdc853cf6ee36148bc7ee55e0606cdc08e0142ea54c12d"} Oct 01 14:03:48 crc kubenswrapper[4605]: I1001 14:03:48.061869 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:48 crc kubenswrapper[4605]: I1001 14:03:48.211551 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-66b5967899-cv4c4" podStartSLOduration=10.211530406 podStartE2EDuration="10.211530406s" podCreationTimestamp="2025-10-01 14:03:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:48.083995669 +0000 UTC m=+1150.827971867" watchObservedRunningTime="2025-10-01 14:03:48.211530406 +0000 UTC m=+1150.955506604" Oct 01 14:03:48 crc kubenswrapper[4605]: I1001 14:03:48.218734 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:03:48 crc kubenswrapper[4605]: I1001 14:03:48.434877 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:48 crc kubenswrapper[4605]: I1001 14:03:48.443863 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-66b5967899-cv4c4" podUID="7ae58440-10a5-44a6-94e8-89d112c67651" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.134033 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"58264b9f-ddeb-466d-94aa-536c1a381308","Type":"ContainerStarted","Data":"2420c5609f5e87a46bf68e48ab5abfa89660b40c61d806f1ab626f60c2f351ab"} Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.141049 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"675a2ec1-82ad-4b20-a077-d8d427108ce7","Type":"ContainerStarted","Data":"ba9ce34cf4f745d41824ee64e7672823d367043f808b58917e16e72fe7f5bcc2"} Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.148349 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerStarted","Data":"aaa16ea88fffe7c66bbc655644ce223b031f9f8d10ba1fcaf42752892fec8978"} Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.148407 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerStarted","Data":"60fdd558c7ddba8c140a4ea611ada8920954a7a304a4d18cf0f2a1ef388c341d"} Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.152252 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.663913633 podStartE2EDuration="18.15223359s" podCreationTimestamp="2025-10-01 14:03:31 +0000 UTC" firstStartedPulling="2025-10-01 14:03:32.280020335 +0000 UTC m=+1135.023996533" lastFinishedPulling="2025-10-01 14:03:47.768340292 +0000 UTC m=+1150.512316490" observedRunningTime="2025-10-01 
14:03:49.149663245 +0000 UTC m=+1151.893639443" watchObservedRunningTime="2025-10-01 14:03:49.15223359 +0000 UTC m=+1151.896209798" Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.157970 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-66b5967899-cv4c4" podUID="7ae58440-10a5-44a6-94e8-89d112c67651" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.179645 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=11.179627463 podStartE2EDuration="11.179627463s" podCreationTimestamp="2025-10-01 14:03:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:03:49.173397166 +0000 UTC m=+1151.917373384" watchObservedRunningTime="2025-10-01 14:03:49.179627463 +0000 UTC m=+1151.923603661" Oct 01 14:03:49 crc kubenswrapper[4605]: I1001 14:03:49.686748 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:50 crc kubenswrapper[4605]: I1001 14:03:50.170053 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerStarted","Data":"b6b3c5aa12525d71d836509cef2357ce28d6877d9e39ca6b90e8072034920cce"} Oct 01 14:03:50 crc kubenswrapper[4605]: I1001 14:03:50.177426 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:51 crc kubenswrapper[4605]: I1001 14:03:51.630912 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:03:51 crc kubenswrapper[4605]: I1001 14:03:51.631485 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:03:52 crc kubenswrapper[4605]: I1001 14:03:52.191829 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerStarted","Data":"005a5fa58a06e469de286a6ba79370ebec9decb47f4c6435fe51867e1d2156ae"} Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.013220 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-hrfc9"] Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.014364 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.034770 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-hrfc9"] Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.136910 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g6w6\" (UniqueName: \"kubernetes.io/projected/1186ce81-fb42-4a2e-ad07-6a63d580a2b1-kube-api-access-6g6w6\") pod \"nova-api-db-create-hrfc9\" (UID: \"1186ce81-fb42-4a2e-ad07-6a63d580a2b1\") " pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.238948 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g6w6\" (UniqueName: \"kubernetes.io/projected/1186ce81-fb42-4a2e-ad07-6a63d580a2b1-kube-api-access-6g6w6\") pod \"nova-api-db-create-hrfc9\" (UID: \"1186ce81-fb42-4a2e-ad07-6a63d580a2b1\") " pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.266049 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g6w6\" (UniqueName: \"kubernetes.io/projected/1186ce81-fb42-4a2e-ad07-6a63d580a2b1-kube-api-access-6g6w6\") pod \"nova-api-db-create-hrfc9\" (UID: \"1186ce81-fb42-4a2e-ad07-6a63d580a2b1\") " pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.331575 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.374189 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-w4cl4"] Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.375578 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.396621 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-w4cl4"] Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.442391 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjccg\" (UniqueName: \"kubernetes.io/projected/c5e8f6b7-5048-4809-96df-46f7bf6dba10-kube-api-access-tjccg\") pod \"nova-cell0-db-create-w4cl4\" (UID: \"c5e8f6b7-5048-4809-96df-46f7bf6dba10\") " pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.467031 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-66b5967899-cv4c4" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.512744 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.522457 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-nwhc8"] Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.524204 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.527707 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749477d64b-5dpnm" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.545624 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjccg\" (UniqueName: \"kubernetes.io/projected/c5e8f6b7-5048-4809-96df-46f7bf6dba10-kube-api-access-tjccg\") pod \"nova-cell0-db-create-w4cl4\" (UID: \"c5e8f6b7-5048-4809-96df-46f7bf6dba10\") " pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.598210 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-nwhc8"] Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.618182 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjccg\" (UniqueName: \"kubernetes.io/projected/c5e8f6b7-5048-4809-96df-46f7bf6dba10-kube-api-access-tjccg\") pod \"nova-cell0-db-create-w4cl4\" (UID: \"c5e8f6b7-5048-4809-96df-46f7bf6dba10\") " pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.661274 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75kv4\" (UniqueName: \"kubernetes.io/projected/99b2e3e9-ede9-4ece-9e25-8d4900e33264-kube-api-access-75kv4\") pod \"nova-cell1-db-create-nwhc8\" (UID: \"99b2e3e9-ede9-4ece-9e25-8d4900e33264\") " pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.737445 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.763519 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75kv4\" (UniqueName: \"kubernetes.io/projected/99b2e3e9-ede9-4ece-9e25-8d4900e33264-kube-api-access-75kv4\") pod \"nova-cell1-db-create-nwhc8\" (UID: \"99b2e3e9-ede9-4ece-9e25-8d4900e33264\") " pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.811034 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75kv4\" (UniqueName: \"kubernetes.io/projected/99b2e3e9-ede9-4ece-9e25-8d4900e33264-kube-api-access-75kv4\") pod \"nova-cell1-db-create-nwhc8\" (UID: \"99b2e3e9-ede9-4ece-9e25-8d4900e33264\") " pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.978590 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.983735 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:03:53 crc kubenswrapper[4605]: E1001 14:03:53.994516 4605 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19af3ac5_8b1a_4301_88a2_96ae085ee9e0.slice/crio-6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19af3ac5_8b1a_4301_88a2_96ae085ee9e0.slice/crio-conmon-6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf.scope\": RecentStats: unable to find data in memory cache]" Oct 01 14:03:53 crc kubenswrapper[4605]: I1001 14:03:53.995610 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.068805 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-secret-key\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.068923 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-tls-certs\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.068951 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-logs\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.068980 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-config-data\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.069032 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwpd2\" (UniqueName: \"kubernetes.io/projected/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-kube-api-access-wwpd2\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.069082 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-combined-ca-bundle\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.069143 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-scripts\") pod \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\" (UID: \"19af3ac5-8b1a-4301-88a2-96ae085ee9e0\") " Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.076021 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-logs" (OuterVolumeSpecName: "logs") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.077215 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.077283 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-kube-api-access-wwpd2" (OuterVolumeSpecName: "kube-api-access-wwpd2") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "kube-api-access-wwpd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.104867 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-config-data" (OuterVolumeSpecName: "config-data") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.115143 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.128976 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-scripts" (OuterVolumeSpecName: "scripts") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.167325 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "19af3ac5-8b1a-4301-88a2-96ae085ee9e0" (UID: "19af3ac5-8b1a-4301-88a2-96ae085ee9e0"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170869 4605 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170897 4605 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170907 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170915 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170923 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwpd2\" (UniqueName: \"kubernetes.io/projected/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-kube-api-access-wwpd2\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170933 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.170940 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19af3ac5-8b1a-4301-88a2-96ae085ee9e0-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.193204 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-hrfc9"] Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.227446 4605 generic.go:334] "Generic (PLEG): container finished" podID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerID="6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf" exitCode=137 Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.228620 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749477d64b-5dpnm" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.233407 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749477d64b-5dpnm" event={"ID":"19af3ac5-8b1a-4301-88a2-96ae085ee9e0","Type":"ContainerDied","Data":"6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf"} Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.233471 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749477d64b-5dpnm" event={"ID":"19af3ac5-8b1a-4301-88a2-96ae085ee9e0","Type":"ContainerDied","Data":"d328797382c8dacd49fad4c3955ddc7b0e591a725e04cf9f988664bc97196fc8"} Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.233489 4605 scope.go:117] "RemoveContainer" containerID="423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.320219 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-749477d64b-5dpnm"] Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.347300 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-749477d64b-5dpnm"] Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.431280 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-w4cl4"] Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.457555 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-nwhc8"] Oct 01 14:03:54 crc kubenswrapper[4605]: W1001 14:03:54.489707 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5e8f6b7_5048_4809_96df_46f7bf6dba10.slice/crio-6a07578af6d23ad7aa8f79f79104eb608c715444ed94b525361b5be03ffc67b3 WatchSource:0}: Error finding container 6a07578af6d23ad7aa8f79f79104eb608c715444ed94b525361b5be03ffc67b3: Status 404 returned error can't find the container with id 6a07578af6d23ad7aa8f79f79104eb608c715444ed94b525361b5be03ffc67b3 Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.489815 4605 scope.go:117] "RemoveContainer" containerID="6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.706511 4605 scope.go:117] "RemoveContainer" containerID="423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9" Oct 01 14:03:54 crc kubenswrapper[4605]: E1001 14:03:54.709517 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9\": container with ID starting with 423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9 not found: ID does not exist" containerID="423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.709630 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9"} err="failed to get container status \"423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9\": rpc error: code = NotFound desc = could not find container \"423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9\": container with ID starting with 423669f3dc500c10e2f1d04f16ebcb37769e2415aca599a5a0c901725e42b5f9 not found: ID does not exist" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.709721 4605 scope.go:117] 
"RemoveContainer" containerID="6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf" Oct 01 14:03:54 crc kubenswrapper[4605]: E1001 14:03:54.711173 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf\": container with ID starting with 6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf not found: ID does not exist" containerID="6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf" Oct 01 14:03:54 crc kubenswrapper[4605]: I1001 14:03:54.711279 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf"} err="failed to get container status \"6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf\": rpc error: code = NotFound desc = could not find container \"6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf\": container with ID starting with 6a2599a0ed41e906ee251f3a471189a1a7710e3f7990ee28a804ea625997a8bf not found: ID does not exist" Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.237931 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerStarted","Data":"c1933dd261b4542e6fa28100192fa2628f78c08d9c0c493718f22f2d025e6334"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.238036 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-central-agent" containerID="cri-o://aaa16ea88fffe7c66bbc655644ce223b031f9f8d10ba1fcaf42752892fec8978" gracePeriod=30 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.238108 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.238162 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="proxy-httpd" containerID="cri-o://c1933dd261b4542e6fa28100192fa2628f78c08d9c0c493718f22f2d025e6334" gracePeriod=30 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.238202 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="sg-core" containerID="cri-o://005a5fa58a06e469de286a6ba79370ebec9decb47f4c6435fe51867e1d2156ae" gracePeriod=30 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.238234 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-notification-agent" containerID="cri-o://b6b3c5aa12525d71d836509cef2357ce28d6877d9e39ca6b90e8072034920cce" gracePeriod=30 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.247301 4605 generic.go:334] "Generic (PLEG): container finished" podID="99b2e3e9-ede9-4ece-9e25-8d4900e33264" containerID="250d6b8a09b193137b67bd55f34b17a3f4bf3c4d1882f5cb82360a8a042a2879" exitCode=0 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.247370 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-nwhc8" 
event={"ID":"99b2e3e9-ede9-4ece-9e25-8d4900e33264","Type":"ContainerDied","Data":"250d6b8a09b193137b67bd55f34b17a3f4bf3c4d1882f5cb82360a8a042a2879"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.247408 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-nwhc8" event={"ID":"99b2e3e9-ede9-4ece-9e25-8d4900e33264","Type":"ContainerStarted","Data":"57a17ac6d796562f1564796e232ccb0fdb551597d14435048bc5f7e7f7947442"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.250899 4605 generic.go:334] "Generic (PLEG): container finished" podID="1186ce81-fb42-4a2e-ad07-6a63d580a2b1" containerID="e334721b61dc5d90affcd8f0a3e2285d2a8cb08cd629cd21a2a518ddf97af508" exitCode=0 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.250956 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hrfc9" event={"ID":"1186ce81-fb42-4a2e-ad07-6a63d580a2b1","Type":"ContainerDied","Data":"e334721b61dc5d90affcd8f0a3e2285d2a8cb08cd629cd21a2a518ddf97af508"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.250982 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hrfc9" event={"ID":"1186ce81-fb42-4a2e-ad07-6a63d580a2b1","Type":"ContainerStarted","Data":"45966a7273dd8bb7fbcd4f69f671106b3225b46d7a1d6b48bdfc912c5a6e5513"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.256580 4605 generic.go:334] "Generic (PLEG): container finished" podID="c5e8f6b7-5048-4809-96df-46f7bf6dba10" containerID="123cca5e71db19976a221cc6cca0ba3d968e708fea37788101a6847e016d065c" exitCode=0 Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.256622 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-w4cl4" event={"ID":"c5e8f6b7-5048-4809-96df-46f7bf6dba10","Type":"ContainerDied","Data":"123cca5e71db19976a221cc6cca0ba3d968e708fea37788101a6847e016d065c"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.256646 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-w4cl4" event={"ID":"c5e8f6b7-5048-4809-96df-46f7bf6dba10","Type":"ContainerStarted","Data":"6a07578af6d23ad7aa8f79f79104eb608c715444ed94b525361b5be03ffc67b3"} Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.270669 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=9.895633177 podStartE2EDuration="16.27064981s" podCreationTimestamp="2025-10-01 14:03:39 +0000 UTC" firstStartedPulling="2025-10-01 14:03:48.240825178 +0000 UTC m=+1150.984801386" lastFinishedPulling="2025-10-01 14:03:54.615841811 +0000 UTC m=+1157.359818019" observedRunningTime="2025-10-01 14:03:55.26787601 +0000 UTC m=+1158.011852218" watchObservedRunningTime="2025-10-01 14:03:55.27064981 +0000 UTC m=+1158.014626018" Oct 01 14:03:55 crc kubenswrapper[4605]: I1001 14:03:55.938633 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" path="/var/lib/kubelet/pods/19af3ac5-8b1a-4301-88a2-96ae085ee9e0/volumes" Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.081521 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-f85558977-q9rhp" Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.115683 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.115940 4605 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-external-api-0" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-log" containerID="cri-o://1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a" gracePeriod=30 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.116015 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-httpd" containerID="cri-o://ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1" gracePeriod=30 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.157228 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76d9c5f468-cdf7s"] Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.157494 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76d9c5f468-cdf7s" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-api" containerID="cri-o://9295dbb20a5defeafe207475f4bb052a4a5c0848cd78d13c0bc653ff9174bd58" gracePeriod=30 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.157587 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76d9c5f468-cdf7s" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-httpd" containerID="cri-o://b08d83199c13552e355c429f0810c2d68f6be6bd4e1d171dd46d1f00abb8b248" gracePeriod=30 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.274498 4605 generic.go:334] "Generic (PLEG): container finished" podID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerID="c1933dd261b4542e6fa28100192fa2628f78c08d9c0c493718f22f2d025e6334" exitCode=0 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.275002 4605 generic.go:334] "Generic (PLEG): container finished" podID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerID="005a5fa58a06e469de286a6ba79370ebec9decb47f4c6435fe51867e1d2156ae" exitCode=2 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.275033 4605 generic.go:334] "Generic (PLEG): container finished" podID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerID="b6b3c5aa12525d71d836509cef2357ce28d6877d9e39ca6b90e8072034920cce" exitCode=0 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.274938 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerDied","Data":"c1933dd261b4542e6fa28100192fa2628f78c08d9c0c493718f22f2d025e6334"} Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.275123 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerDied","Data":"005a5fa58a06e469de286a6ba79370ebec9decb47f4c6435fe51867e1d2156ae"} Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.275136 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerDied","Data":"b6b3c5aa12525d71d836509cef2357ce28d6877d9e39ca6b90e8072034920cce"} Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.283147 4605 generic.go:334] "Generic (PLEG): container finished" podID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerID="1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a" exitCode=143 Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.283307 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"46085e14-7d7e-490a-8078-d2a40a4f3498","Type":"ContainerDied","Data":"1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a"} Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.809834 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.910266 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.919083 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.940189 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75kv4\" (UniqueName: \"kubernetes.io/projected/99b2e3e9-ede9-4ece-9e25-8d4900e33264-kube-api-access-75kv4\") pod \"99b2e3e9-ede9-4ece-9e25-8d4900e33264\" (UID: \"99b2e3e9-ede9-4ece-9e25-8d4900e33264\") " Oct 01 14:03:56 crc kubenswrapper[4605]: I1001 14:03:56.945754 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99b2e3e9-ede9-4ece-9e25-8d4900e33264-kube-api-access-75kv4" (OuterVolumeSpecName: "kube-api-access-75kv4") pod "99b2e3e9-ede9-4ece-9e25-8d4900e33264" (UID: "99b2e3e9-ede9-4ece-9e25-8d4900e33264"). InnerVolumeSpecName "kube-api-access-75kv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.041598 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjccg\" (UniqueName: \"kubernetes.io/projected/c5e8f6b7-5048-4809-96df-46f7bf6dba10-kube-api-access-tjccg\") pod \"c5e8f6b7-5048-4809-96df-46f7bf6dba10\" (UID: \"c5e8f6b7-5048-4809-96df-46f7bf6dba10\") " Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.041960 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6w6\" (UniqueName: \"kubernetes.io/projected/1186ce81-fb42-4a2e-ad07-6a63d580a2b1-kube-api-access-6g6w6\") pod \"1186ce81-fb42-4a2e-ad07-6a63d580a2b1\" (UID: \"1186ce81-fb42-4a2e-ad07-6a63d580a2b1\") " Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.043765 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75kv4\" (UniqueName: \"kubernetes.io/projected/99b2e3e9-ede9-4ece-9e25-8d4900e33264-kube-api-access-75kv4\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.045665 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5e8f6b7-5048-4809-96df-46f7bf6dba10-kube-api-access-tjccg" (OuterVolumeSpecName: "kube-api-access-tjccg") pod "c5e8f6b7-5048-4809-96df-46f7bf6dba10" (UID: "c5e8f6b7-5048-4809-96df-46f7bf6dba10"). InnerVolumeSpecName "kube-api-access-tjccg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.046230 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1186ce81-fb42-4a2e-ad07-6a63d580a2b1-kube-api-access-6g6w6" (OuterVolumeSpecName: "kube-api-access-6g6w6") pod "1186ce81-fb42-4a2e-ad07-6a63d580a2b1" (UID: "1186ce81-fb42-4a2e-ad07-6a63d580a2b1"). InnerVolumeSpecName "kube-api-access-6g6w6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.145374 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6w6\" (UniqueName: \"kubernetes.io/projected/1186ce81-fb42-4a2e-ad07-6a63d580a2b1-kube-api-access-6g6w6\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.145407 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjccg\" (UniqueName: \"kubernetes.io/projected/c5e8f6b7-5048-4809-96df-46f7bf6dba10-kube-api-access-tjccg\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.293446 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-nwhc8" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.297724 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-nwhc8" event={"ID":"99b2e3e9-ede9-4ece-9e25-8d4900e33264","Type":"ContainerDied","Data":"57a17ac6d796562f1564796e232ccb0fdb551597d14435048bc5f7e7f7947442"} Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.297935 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57a17ac6d796562f1564796e232ccb0fdb551597d14435048bc5f7e7f7947442" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.299323 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hrfc9" event={"ID":"1186ce81-fb42-4a2e-ad07-6a63d580a2b1","Type":"ContainerDied","Data":"45966a7273dd8bb7fbcd4f69f671106b3225b46d7a1d6b48bdfc912c5a6e5513"} Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.299683 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45966a7273dd8bb7fbcd4f69f671106b3225b46d7a1d6b48bdfc912c5a6e5513" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.299517 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hrfc9" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.300930 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-w4cl4" event={"ID":"c5e8f6b7-5048-4809-96df-46f7bf6dba10","Type":"ContainerDied","Data":"6a07578af6d23ad7aa8f79f79104eb608c715444ed94b525361b5be03ffc67b3"} Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.300967 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-w4cl4" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.300975 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a07578af6d23ad7aa8f79f79104eb608c715444ed94b525361b5be03ffc67b3" Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.302878 4605 generic.go:334] "Generic (PLEG): container finished" podID="33bae256-76ac-47a8-b5d6-84df403fb294" containerID="b08d83199c13552e355c429f0810c2d68f6be6bd4e1d171dd46d1f00abb8b248" exitCode=0 Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.302931 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d9c5f468-cdf7s" event={"ID":"33bae256-76ac-47a8-b5d6-84df403fb294","Type":"ContainerDied","Data":"b08d83199c13552e355c429f0810c2d68f6be6bd4e1d171dd46d1f00abb8b248"} Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.509948 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.510211 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-log" containerID="cri-o://292676769d30f49539bb03392db2114220120cee6f940246def06af3828c68ac" gracePeriod=30 Oct 01 14:03:57 crc kubenswrapper[4605]: I1001 14:03:57.510330 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-httpd" containerID="cri-o://7635e8638a970607c746051057cbf7b68b7eaa1b3669d8943796c103b67ffd15" gracePeriod=30 Oct 01 14:03:58 crc kubenswrapper[4605]: I1001 14:03:58.312384 4605 generic.go:334] "Generic (PLEG): container finished" podID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerID="292676769d30f49539bb03392db2114220120cee6f940246def06af3828c68ac" exitCode=143 Oct 01 14:03:58 crc kubenswrapper[4605]: I1001 14:03:58.312454 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerDied","Data":"292676769d30f49539bb03392db2114220120cee6f940246def06af3828c68ac"} Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.345694 4605 generic.go:334] "Generic (PLEG): container finished" podID="33bae256-76ac-47a8-b5d6-84df403fb294" containerID="9295dbb20a5defeafe207475f4bb052a4a5c0848cd78d13c0bc653ff9174bd58" exitCode=0 Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.345752 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d9c5f468-cdf7s" event={"ID":"33bae256-76ac-47a8-b5d6-84df403fb294","Type":"ContainerDied","Data":"9295dbb20a5defeafe207475f4bb052a4a5c0848cd78d13c0bc653ff9174bd58"} Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.550321 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76d9c5f468-cdf7s" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.602328 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-config\") pod \"33bae256-76ac-47a8-b5d6-84df403fb294\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.602401 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr2xv\" (UniqueName: \"kubernetes.io/projected/33bae256-76ac-47a8-b5d6-84df403fb294-kube-api-access-pr2xv\") pod \"33bae256-76ac-47a8-b5d6-84df403fb294\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.602474 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-combined-ca-bundle\") pod \"33bae256-76ac-47a8-b5d6-84df403fb294\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.602570 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-ovndb-tls-certs\") pod \"33bae256-76ac-47a8-b5d6-84df403fb294\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.602589 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-httpd-config\") pod \"33bae256-76ac-47a8-b5d6-84df403fb294\" (UID: \"33bae256-76ac-47a8-b5d6-84df403fb294\") " Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.611956 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "33bae256-76ac-47a8-b5d6-84df403fb294" (UID: "33bae256-76ac-47a8-b5d6-84df403fb294"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.622630 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33bae256-76ac-47a8-b5d6-84df403fb294-kube-api-access-pr2xv" (OuterVolumeSpecName: "kube-api-access-pr2xv") pod "33bae256-76ac-47a8-b5d6-84df403fb294" (UID: "33bae256-76ac-47a8-b5d6-84df403fb294"). InnerVolumeSpecName "kube-api-access-pr2xv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.704837 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr2xv\" (UniqueName: \"kubernetes.io/projected/33bae256-76ac-47a8-b5d6-84df403fb294-kube-api-access-pr2xv\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.704873 4605 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.716893 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "33bae256-76ac-47a8-b5d6-84df403fb294" (UID: "33bae256-76ac-47a8-b5d6-84df403fb294"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.724179 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33bae256-76ac-47a8-b5d6-84df403fb294" (UID: "33bae256-76ac-47a8-b5d6-84df403fb294"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.733044 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-config" (OuterVolumeSpecName: "config") pod "33bae256-76ac-47a8-b5d6-84df403fb294" (UID: "33bae256-76ac-47a8-b5d6-84df403fb294"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.806370 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.806660 4605 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:03:59 crc kubenswrapper[4605]: I1001 14:03:59.806672 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/33bae256-76ac-47a8-b5d6-84df403fb294-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.032113 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.113737 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-scripts\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.113802 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.113881 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.113958 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-logs\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.114022 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-httpd-run\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.114043 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-config-data\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.114061 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwhch\" (UniqueName: \"kubernetes.io/projected/46085e14-7d7e-490a-8078-d2a40a4f3498-kube-api-access-kwhch\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.114103 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.115430 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-logs" (OuterVolumeSpecName: "logs") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.116045 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.120756 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.118499 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-scripts" (OuterVolumeSpecName: "scripts") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.120910 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46085e14-7d7e-490a-8078-d2a40a4f3498-kube-api-access-kwhch" (OuterVolumeSpecName: "kube-api-access-kwhch") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "kube-api-access-kwhch". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.219175 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.225516 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle\") pod \"46085e14-7d7e-490a-8078-d2a40a4f3498\" (UID: \"46085e14-7d7e-490a-8078-d2a40a4f3498\") "
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.226232 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-logs\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.226250 4605 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/46085e14-7d7e-490a-8078-d2a40a4f3498-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.226260 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwhch\" (UniqueName: \"kubernetes.io/projected/46085e14-7d7e-490a-8078-d2a40a4f3498-kube-api-access-kwhch\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.226270 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.226290 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Oct 01 14:04:00 crc kubenswrapper[4605]: W1001 14:04:00.226464 4605 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/46085e14-7d7e-490a-8078-d2a40a4f3498/volumes/kubernetes.io~secret/combined-ca-bundle
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.226488 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.229218 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-config-data" (OuterVolumeSpecName: "config-data") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.229286 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "46085e14-7d7e-490a-8078-d2a40a4f3498" (UID: "46085e14-7d7e-490a-8078-d2a40a4f3498"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.247893 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.329597 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.329628 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.329637 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.329649 4605 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46085e14-7d7e-490a-8078-d2a40a4f3498-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.359525 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76d9c5f468-cdf7s" event={"ID":"33bae256-76ac-47a8-b5d6-84df403fb294","Type":"ContainerDied","Data":"0c99f574224658edde3e2cfacf098b4c7cc1b9344c78981a2e9ea8eefca66518"}
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.359630 4605 scope.go:117] "RemoveContainer" containerID="b08d83199c13552e355c429f0810c2d68f6be6bd4e1d171dd46d1f00abb8b248"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.359796 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76d9c5f468-cdf7s"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.374725 4605 generic.go:334] "Generic (PLEG): container finished" podID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerID="ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1" exitCode=0
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.374772 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"46085e14-7d7e-490a-8078-d2a40a4f3498","Type":"ContainerDied","Data":"ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1"}
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.374803 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"46085e14-7d7e-490a-8078-d2a40a4f3498","Type":"ContainerDied","Data":"56b63fe7ec1291dd82a36bf5af722665830764d6e1581f43d2928324525ad54c"}
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.374868 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.400812 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76d9c5f468-cdf7s"]
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.403419 4605 scope.go:117] "RemoveContainer" containerID="9295dbb20a5defeafe207475f4bb052a4a5c0848cd78d13c0bc653ff9174bd58"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.412919 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-76d9c5f468-cdf7s"]
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.442233 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.448824 4605 scope.go:117] "RemoveContainer" containerID="ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.460428 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.490356 4605 scope.go:117] "RemoveContainer" containerID="1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.493239 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.493869 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e8f6b7-5048-4809-96df-46f7bf6dba10" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.493892 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e8f6b7-5048-4809-96df-46f7bf6dba10" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.493905 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-httpd"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.493915 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-httpd"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.493932 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99b2e3e9-ede9-4ece-9e25-8d4900e33264" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.493941 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="99b2e3e9-ede9-4ece-9e25-8d4900e33264" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.493959 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.493967 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.493992 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-api"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.493999 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-api"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.494014 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-httpd"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494022 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-httpd"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.494035 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-log"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494043 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-log"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.494060 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1186ce81-fb42-4a2e-ad07-6a63d580a2b1" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494070 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1186ce81-fb42-4a2e-ad07-6a63d580a2b1" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.494106 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon-log"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494115 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon-log"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494344 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon-log"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494367 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-log"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494382 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="1186ce81-fb42-4a2e-ad07-6a63d580a2b1" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494394 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="99b2e3e9-ede9-4ece-9e25-8d4900e33264" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494412 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-httpd"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494421 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="19af3ac5-8b1a-4301-88a2-96ae085ee9e0" containerName="horizon"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494435 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" containerName="glance-httpd"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494443 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" containerName="neutron-api"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.494458 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5e8f6b7-5048-4809-96df-46f7bf6dba10" containerName="mariadb-database-create"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.495696 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.509222 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.511441 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.522570 4605 scope.go:117] "RemoveContainer" containerID="ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.523149 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1\": container with ID starting with ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1 not found: ID does not exist" containerID="ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.523172 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1"} err="failed to get container status \"ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1\": rpc error: code = NotFound desc = could not find container \"ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1\": container with ID starting with ebe349f06b2cd7eb9c3f8125823bdea98434ad65928df64bbd74ad27f86de5c1 not found: ID does not exist"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.523193 4605 scope.go:117] "RemoveContainer" containerID="1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a"
Oct 01 14:04:00 crc kubenswrapper[4605]: E1001 14:04:00.523418 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a\": container with ID starting with 1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a not found: ID does not exist" containerID="1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.523436 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a"} err="failed to get container status \"1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a\": rpc error: code = NotFound desc = could not find container \"1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a\": container with ID starting with 1f9765bf6a057c7fb2b1034b8d8edffcad1efa83c7bc3f02af8c15cd97df0a4a not found: ID does not exist"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.528052 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.638600 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.638870 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-scripts\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.638924 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.639130 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.639324 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eac8da6a-ca40-4b05-b525-d645a20f3592-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.639387 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-config-data\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.639426 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eac8da6a-ca40-4b05-b525-d645a20f3592-logs\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.639469 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s68q\" (UniqueName: \"kubernetes.io/projected/eac8da6a-ca40-4b05-b525-d645a20f3592-kube-api-access-4s68q\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741193 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eac8da6a-ca40-4b05-b525-d645a20f3592-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741245 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-config-data\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741289 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eac8da6a-ca40-4b05-b525-d645a20f3592-logs\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741317 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s68q\" (UniqueName: \"kubernetes.io/projected/eac8da6a-ca40-4b05-b525-d645a20f3592-kube-api-access-4s68q\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741345 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741400 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-scripts\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741429 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.741479 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.745631 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eac8da6a-ca40-4b05-b525-d645a20f3592-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.747671 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.750390 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.750717 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eac8da6a-ca40-4b05-b525-d645a20f3592-logs\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.756648 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-scripts\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.757136 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.762426 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eac8da6a-ca40-4b05-b525-d645a20f3592-config-data\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.772023 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s68q\" (UniqueName: \"kubernetes.io/projected/eac8da6a-ca40-4b05-b525-d645a20f3592-kube-api-access-4s68q\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:00 crc kubenswrapper[4605]: I1001 14:04:00.840390 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eac8da6a-ca40-4b05-b525-d645a20f3592\") " pod="openstack/glance-default-external-api-0"
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.121570 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.402826 4605 generic.go:334] "Generic (PLEG): container finished" podID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerID="7635e8638a970607c746051057cbf7b68b7eaa1b3669d8943796c103b67ffd15" exitCode=0
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.402880 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerDied","Data":"7635e8638a970607c746051057cbf7b68b7eaa1b3669d8943796c103b67ffd15"}
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.900867 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.957723 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33bae256-76ac-47a8-b5d6-84df403fb294" path="/var/lib/kubelet/pods/33bae256-76ac-47a8-b5d6-84df403fb294/volumes"
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.958684 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46085e14-7d7e-490a-8078-d2a40a4f3498" path="/var/lib/kubelet/pods/46085e14-7d7e-490a-8078-d2a40a4f3498/volumes"
Oct 01 14:04:01 crc kubenswrapper[4605]: I1001 14:04:01.961247 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.080694 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-combined-ca-bundle\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.080768 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.080846 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-internal-tls-certs\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.080890 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.080935 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-scripts\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.081005 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-config-data\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.081050 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-httpd-run\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.081187 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcmkf\" (UniqueName: \"kubernetes.io/projected/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-kube-api-access-wcmkf\") pod \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\" (UID: \"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d\") "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.082832 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs" (OuterVolumeSpecName: "logs") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.084685 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.094140 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.104346 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-scripts" (OuterVolumeSpecName: "scripts") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.136144 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-kube-api-access-wcmkf" (OuterVolumeSpecName: "kube-api-access-wcmkf") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "kube-api-access-wcmkf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.183571 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcmkf\" (UniqueName: \"kubernetes.io/projected/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-kube-api-access-wcmkf\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.184156 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-logs\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.184274 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.184361 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-scripts\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.184619 4605 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.194340 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.229695 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-config-data" (OuterVolumeSpecName: "config-data") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.241167 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.252416 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" (UID: "8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.287070 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.287126 4605 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.287135 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.287144 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d-config-data\") on node \"crc\" DevicePath \"\""
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.430884 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d","Type":"ContainerDied","Data":"d7b8722a83fa5afa1facb63a139d20f997ec03365a44b75aab30cfba792ae70e"}
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.430935 4605 scope.go:117] "RemoveContainer" containerID="7635e8638a970607c746051057cbf7b68b7eaa1b3669d8943796c103b67ffd15"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.431057 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.437938 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eac8da6a-ca40-4b05-b525-d645a20f3592","Type":"ContainerStarted","Data":"7a42399eae69b89cec27937759457e166f8fa08af2e11a27011a50395ac757c6"}
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.484083 4605 scope.go:117] "RemoveContainer" containerID="292676769d30f49539bb03392db2114220120cee6f940246def06af3828c68ac"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.485200 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.500672 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.509084 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:04:02 crc kubenswrapper[4605]: E1001 14:04:02.509526 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-log"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.509546 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-log"
Oct 01 14:04:02 crc kubenswrapper[4605]: E1001 14:04:02.509572 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-httpd"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.509580 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-httpd"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.509752 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-httpd"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.509797 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" containerName="glance-log"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.510886 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.518695 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.519335 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.519592 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693176 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693223 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693254 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693288 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693329 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv9r4\" (UniqueName: \"kubernetes.io/projected/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-kube-api-access-lv9r4\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693347 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693404 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-logs\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.693427 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795040 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795103 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795129 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795165 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795212 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv9r4\" (UniqueName: \"kubernetes.io/projected/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-kube-api-access-lv9r4\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795227 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795288 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-logs\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.795311 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.799860 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.800221 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.800681 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-logs\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.808406 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.808792 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.809219 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.815473 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.828857 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv9r4\" (UniqueName: \"kubernetes.io/projected/2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6-kube-api-access-lv9r4\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.833696 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6\") " pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:02 crc kubenswrapper[4605]: I1001 14:04:02.864076 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.286418 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-f398-account-create-69t8k"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.288353 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f398-account-create-69t8k"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.292239 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.298367 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-f398-account-create-69t8k"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.414526 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxs8n\" (UniqueName: \"kubernetes.io/projected/dc6e55f0-8554-4e08-a425-104179aecfd3-kube-api-access-kxs8n\") pod \"nova-api-f398-account-create-69t8k\" (UID: \"dc6e55f0-8554-4e08-a425-104179aecfd3\") " pod="openstack/nova-api-f398-account-create-69t8k"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.465641 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eac8da6a-ca40-4b05-b525-d645a20f3592","Type":"ContainerStarted","Data":"2d4c51bee4008b2a85d5f4a48a7a7bee46e773b9e3767f5f92d3abccad921ea1"}
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.488085 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b33b-account-create-k9fvm"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.489236 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b33b-account-create-k9fvm"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.492420 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.508842 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b33b-account-create-k9fvm"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.517460 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxs8n\" (UniqueName: \"kubernetes.io/projected/dc6e55f0-8554-4e08-a425-104179aecfd3-kube-api-access-kxs8n\") pod \"nova-api-f398-account-create-69t8k\" (UID: \"dc6e55f0-8554-4e08-a425-104179aecfd3\") " pod="openstack/nova-api-f398-account-create-69t8k"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.569147 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxs8n\" (UniqueName: \"kubernetes.io/projected/dc6e55f0-8554-4e08-a425-104179aecfd3-kube-api-access-kxs8n\") pod \"nova-api-f398-account-create-69t8k\" (UID: \"dc6e55f0-8554-4e08-a425-104179aecfd3\") " pod="openstack/nova-api-f398-account-create-69t8k"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.598446 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.612366 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f398-account-create-69t8k"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.620001 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs84b\" (UniqueName: \"kubernetes.io/projected/4f2464d3-9ea7-482c-aa2b-f66d42141756-kube-api-access-bs84b\") pod \"nova-cell0-b33b-account-create-k9fvm\" (UID: \"4f2464d3-9ea7-482c-aa2b-f66d42141756\") " pod="openstack/nova-cell0-b33b-account-create-k9fvm"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.693765 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-e3c6-account-create-gh4rs"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.695198 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e3c6-account-create-gh4rs"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.702481 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.717884 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-e3c6-account-create-gh4rs"]
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.732818 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs84b\" (UniqueName: \"kubernetes.io/projected/4f2464d3-9ea7-482c-aa2b-f66d42141756-kube-api-access-bs84b\") pod \"nova-cell0-b33b-account-create-k9fvm\" (UID: \"4f2464d3-9ea7-482c-aa2b-f66d42141756\") " pod="openstack/nova-cell0-b33b-account-create-k9fvm"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.734179 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbq4t\" (UniqueName: \"kubernetes.io/projected/5ce5e6eb-9c71-46d4-938d-707d1586936d-kube-api-access-lbq4t\") pod \"nova-cell1-e3c6-account-create-gh4rs\" (UID: \"5ce5e6eb-9c71-46d4-938d-707d1586936d\") " pod="openstack/nova-cell1-e3c6-account-create-gh4rs"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.762494 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs84b\" (UniqueName: \"kubernetes.io/projected/4f2464d3-9ea7-482c-aa2b-f66d42141756-kube-api-access-bs84b\") pod \"nova-cell0-b33b-account-create-k9fvm\" (UID: \"4f2464d3-9ea7-482c-aa2b-f66d42141756\") " pod="openstack/nova-cell0-b33b-account-create-k9fvm"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.813435 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b33b-account-create-k9fvm"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.836616 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbq4t\" (UniqueName: \"kubernetes.io/projected/5ce5e6eb-9c71-46d4-938d-707d1586936d-kube-api-access-lbq4t\") pod \"nova-cell1-e3c6-account-create-gh4rs\" (UID: \"5ce5e6eb-9c71-46d4-938d-707d1586936d\") " pod="openstack/nova-cell1-e3c6-account-create-gh4rs"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.864362 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbq4t\" (UniqueName: \"kubernetes.io/projected/5ce5e6eb-9c71-46d4-938d-707d1586936d-kube-api-access-lbq4t\") pod \"nova-cell1-e3c6-account-create-gh4rs\" (UID: \"5ce5e6eb-9c71-46d4-938d-707d1586936d\") " pod="openstack/nova-cell1-e3c6-account-create-gh4rs"
Oct 01 14:04:03 crc kubenswrapper[4605]: I1001 14:04:03.947481 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d" path="/var/lib/kubelet/pods/8ef1476f-27e2-4fa6-ab6a-3f4396e7bb1d/volumes"
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.029605 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e3c6-account-create-gh4rs"
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.504499 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-f398-account-create-69t8k"]
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.508873 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eac8da6a-ca40-4b05-b525-d645a20f3592","Type":"ContainerStarted","Data":"fd2a81f6b2d1fdb92fb82b288ad8eee6e1a0f43bcbda86ec8ee5f0a926573d1c"}
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.520295 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6","Type":"ContainerStarted","Data":"201ba8ed0862fc164b80c1ae588be36553d62dfef7bb842f25a556b1973372c6"}
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.569429 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.569406535 podStartE2EDuration="4.569406535s" podCreationTimestamp="2025-10-01 14:04:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:04:04.56050525 +0000 UTC m=+1167.304481458" watchObservedRunningTime="2025-10-01 14:04:04.569406535 +0000 UTC m=+1167.313382743"
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.635982 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b33b-account-create-k9fvm"]
Oct 01 14:04:04 crc kubenswrapper[4605]: W1001 14:04:04.686775 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f2464d3_9ea7_482c_aa2b_f66d42141756.slice/crio-90d01a1f10cda04e6512f996dac939f3febb40857a783b4c008d1565b86e6dc1 WatchSource:0}: Error finding container 90d01a1f10cda04e6512f996dac939f3febb40857a783b4c008d1565b86e6dc1: Status 404 returned error can't find the container with id 90d01a1f10cda04e6512f996dac939f3febb40857a783b4c008d1565b86e6dc1
Oct 01 14:04:04 crc kubenswrapper[4605]: I1001 14:04:04.730834 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-e3c6-account-create-gh4rs"]
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.556632 4605 generic.go:334] "Generic (PLEG): container finished" podID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerID="aaa16ea88fffe7c66bbc655644ce223b031f9f8d10ba1fcaf42752892fec8978" exitCode=0
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.557267 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerDied","Data":"aaa16ea88fffe7c66bbc655644ce223b031f9f8d10ba1fcaf42752892fec8978"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.563080 4605 generic.go:334] "Generic (PLEG): container finished" podID="5ce5e6eb-9c71-46d4-938d-707d1586936d" containerID="7241abc649ee174058942ad5bee531d8101adcbac6b73a26098d508c089a73fd" exitCode=0
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.563149 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e3c6-account-create-gh4rs" event={"ID":"5ce5e6eb-9c71-46d4-938d-707d1586936d","Type":"ContainerDied","Data":"7241abc649ee174058942ad5bee531d8101adcbac6b73a26098d508c089a73fd"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.563175 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e3c6-account-create-gh4rs" event={"ID":"5ce5e6eb-9c71-46d4-938d-707d1586936d","Type":"ContainerStarted","Data":"6c8f5b46be80def26c8f1ec798da5cfd0ca0cab704c733af22112dc07d8de5cf"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.565534 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6","Type":"ContainerStarted","Data":"fc2d6dcc26389a29fa0eb5073c27f716de045cb968e6ec5d156e949b57e06cb4"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.567039 4605 generic.go:334] "Generic (PLEG): container finished" podID="dc6e55f0-8554-4e08-a425-104179aecfd3" containerID="d5fc73d95a746f544d49b3bce8808c5f11c17c8f4f4366de68f1b4db0fcb8524" exitCode=0
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.567173 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f398-account-create-69t8k" event={"ID":"dc6e55f0-8554-4e08-a425-104179aecfd3","Type":"ContainerDied","Data":"d5fc73d95a746f544d49b3bce8808c5f11c17c8f4f4366de68f1b4db0fcb8524"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.567200 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f398-account-create-69t8k" event={"ID":"dc6e55f0-8554-4e08-a425-104179aecfd3","Type":"ContainerStarted","Data":"1acea1b22997cb195d571379dbf139d873e97c0a5dbbb32fa262f4486c2da362"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.569425 4605 generic.go:334] "Generic (PLEG): container finished" podID="4f2464d3-9ea7-482c-aa2b-f66d42141756" containerID="961793ddfe31ddb97cbf276bdd0b26daaef6c3a80911c80711a2357fc2566656" exitCode=0
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.570447 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b33b-account-create-k9fvm" event={"ID":"4f2464d3-9ea7-482c-aa2b-f66d42141756","Type":"ContainerDied","Data":"961793ddfe31ddb97cbf276bdd0b26daaef6c3a80911c80711a2357fc2566656"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.570473 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b33b-account-create-k9fvm" event={"ID":"4f2464d3-9ea7-482c-aa2b-f66d42141756","Type":"ContainerStarted","Data":"90d01a1f10cda04e6512f996dac939f3febb40857a783b4c008d1565b86e6dc1"}
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.606366 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798675 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-scripts\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798759 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-config-data\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798792 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-log-httpd\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798850 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-run-httpd\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798886 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-sg-core-conf-yaml\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798937 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-combined-ca-bundle\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.798983 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9rhk\" (UniqueName: \"kubernetes.io/projected/c601edbd-d09b-48b1-bc0b-40c01b76468c-kube-api-access-z9rhk\") pod \"c601edbd-d09b-48b1-bc0b-40c01b76468c\" (UID: \"c601edbd-d09b-48b1-bc0b-40c01b76468c\") "
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.800521 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.801510 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "log-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.801556 4605 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.807887 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-scripts" (OuterVolumeSpecName: "scripts") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.808042 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c601edbd-d09b-48b1-bc0b-40c01b76468c-kube-api-access-z9rhk" (OuterVolumeSpecName: "kube-api-access-z9rhk") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "kube-api-access-z9rhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.849060 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.903479 4605 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c601edbd-d09b-48b1-bc0b-40c01b76468c-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.903814 4605 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.903831 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9rhk\" (UniqueName: \"kubernetes.io/projected/c601edbd-d09b-48b1-bc0b-40c01b76468c-kube-api-access-z9rhk\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.903844 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.915515 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:05 crc kubenswrapper[4605]: I1001 14:04:05.992252 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-config-data" (OuterVolumeSpecName: "config-data") pod "c601edbd-d09b-48b1-bc0b-40c01b76468c" (UID: "c601edbd-d09b-48b1-bc0b-40c01b76468c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.009201 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.009497 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c601edbd-d09b-48b1-bc0b-40c01b76468c-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.579565 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6","Type":"ContainerStarted","Data":"0783020279dcc2a9d881ec3d279329bdea412b424648516874ef82fcdf80e60a"} Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.581972 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c601edbd-d09b-48b1-bc0b-40c01b76468c","Type":"ContainerDied","Data":"60fdd558c7ddba8c140a4ea611ada8920954a7a304a4d18cf0f2a1ef388c341d"} Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.582051 4605 scope.go:117] "RemoveContainer" containerID="c1933dd261b4542e6fa28100192fa2628f78c08d9c0c493718f22f2d025e6334" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.582285 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.619682 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.619664705 podStartE2EDuration="4.619664705s" podCreationTimestamp="2025-10-01 14:04:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:04:06.612639367 +0000 UTC m=+1169.356615575" watchObservedRunningTime="2025-10-01 14:04:06.619664705 +0000 UTC m=+1169.363640913" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.639320 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.664189 4605 scope.go:117] "RemoveContainer" containerID="005a5fa58a06e469de286a6ba79370ebec9decb47f4c6435fe51867e1d2156ae" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.670750 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.691713 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:06 crc kubenswrapper[4605]: E1001 14:04:06.692124 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="proxy-httpd" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692139 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="proxy-httpd" Oct 01 14:04:06 crc kubenswrapper[4605]: E1001 14:04:06.692152 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-central-agent" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692158 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-central-agent" Oct 01 
14:04:06 crc kubenswrapper[4605]: E1001 14:04:06.692171 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="sg-core" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692176 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="sg-core" Oct 01 14:04:06 crc kubenswrapper[4605]: E1001 14:04:06.692191 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-notification-agent" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692197 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-notification-agent" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692360 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="sg-core" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692372 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="proxy-httpd" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692390 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-central-agent" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.692404 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" containerName="ceilometer-notification-agent" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.693960 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.698248 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.698253 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.703352 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.727682 4605 scope.go:117] "RemoveContainer" containerID="b6b3c5aa12525d71d836509cef2357ce28d6877d9e39ca6b90e8072034920cce" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.769756 4605 scope.go:117] "RemoveContainer" containerID="aaa16ea88fffe7c66bbc655644ce223b031f9f8d10ba1fcaf42752892fec8978" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.826798 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-config-data\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.826855 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-log-httpd\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.826887 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-scripts\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.826925 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-run-httpd\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.826960 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78q9p\" (UniqueName: \"kubernetes.io/projected/9777c293-8ce8-4d82-a10a-b7d4c879227f-kube-api-access-78q9p\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.826987 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.827007 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.927910 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-run-httpd\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.927959 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78q9p\" (UniqueName: \"kubernetes.io/projected/9777c293-8ce8-4d82-a10a-b7d4c879227f-kube-api-access-78q9p\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.927990 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.928006 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.928057 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-config-data\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.928087 4605 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-log-httpd\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.928147 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-scripts\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.928341 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-run-httpd\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.928699 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-log-httpd\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.933623 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-config-data\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.934121 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-scripts\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.935471 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.949521 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:06 crc kubenswrapper[4605]: I1001 14:04:06.951579 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78q9p\" (UniqueName: \"kubernetes.io/projected/9777c293-8ce8-4d82-a10a-b7d4c879227f-kube-api-access-78q9p\") pod \"ceilometer-0\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " pod="openstack/ceilometer-0" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.025449 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.169917 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f398-account-create-69t8k" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.205930 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-e3c6-account-create-gh4rs" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.206968 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b33b-account-create-k9fvm" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.341107 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs84b\" (UniqueName: \"kubernetes.io/projected/4f2464d3-9ea7-482c-aa2b-f66d42141756-kube-api-access-bs84b\") pod \"4f2464d3-9ea7-482c-aa2b-f66d42141756\" (UID: \"4f2464d3-9ea7-482c-aa2b-f66d42141756\") " Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.341162 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxs8n\" (UniqueName: \"kubernetes.io/projected/dc6e55f0-8554-4e08-a425-104179aecfd3-kube-api-access-kxs8n\") pod \"dc6e55f0-8554-4e08-a425-104179aecfd3\" (UID: \"dc6e55f0-8554-4e08-a425-104179aecfd3\") " Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.341282 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbq4t\" (UniqueName: \"kubernetes.io/projected/5ce5e6eb-9c71-46d4-938d-707d1586936d-kube-api-access-lbq4t\") pod \"5ce5e6eb-9c71-46d4-938d-707d1586936d\" (UID: \"5ce5e6eb-9c71-46d4-938d-707d1586936d\") " Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.348375 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f2464d3-9ea7-482c-aa2b-f66d42141756-kube-api-access-bs84b" (OuterVolumeSpecName: "kube-api-access-bs84b") pod "4f2464d3-9ea7-482c-aa2b-f66d42141756" (UID: "4f2464d3-9ea7-482c-aa2b-f66d42141756"). InnerVolumeSpecName "kube-api-access-bs84b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.360591 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc6e55f0-8554-4e08-a425-104179aecfd3-kube-api-access-kxs8n" (OuterVolumeSpecName: "kube-api-access-kxs8n") pod "dc6e55f0-8554-4e08-a425-104179aecfd3" (UID: "dc6e55f0-8554-4e08-a425-104179aecfd3"). InnerVolumeSpecName "kube-api-access-kxs8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.361451 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ce5e6eb-9c71-46d4-938d-707d1586936d-kube-api-access-lbq4t" (OuterVolumeSpecName: "kube-api-access-lbq4t") pod "5ce5e6eb-9c71-46d4-938d-707d1586936d" (UID: "5ce5e6eb-9c71-46d4-938d-707d1586936d"). InnerVolumeSpecName "kube-api-access-lbq4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.443305 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs84b\" (UniqueName: \"kubernetes.io/projected/4f2464d3-9ea7-482c-aa2b-f66d42141756-kube-api-access-bs84b\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.443622 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxs8n\" (UniqueName: \"kubernetes.io/projected/dc6e55f0-8554-4e08-a425-104179aecfd3-kube-api-access-kxs8n\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.443699 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbq4t\" (UniqueName: \"kubernetes.io/projected/5ce5e6eb-9c71-46d4-938d-707d1586936d-kube-api-access-lbq4t\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.593725 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-e3c6-account-create-gh4rs" event={"ID":"5ce5e6eb-9c71-46d4-938d-707d1586936d","Type":"ContainerDied","Data":"6c8f5b46be80def26c8f1ec798da5cfd0ca0cab704c733af22112dc07d8de5cf"} Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.593763 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c8f5b46be80def26c8f1ec798da5cfd0ca0cab704c733af22112dc07d8de5cf" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.593780 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-e3c6-account-create-gh4rs" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.596645 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f398-account-create-69t8k" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.596672 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f398-account-create-69t8k" event={"ID":"dc6e55f0-8554-4e08-a425-104179aecfd3","Type":"ContainerDied","Data":"1acea1b22997cb195d571379dbf139d873e97c0a5dbbb32fa262f4486c2da362"} Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.596869 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1acea1b22997cb195d571379dbf139d873e97c0a5dbbb32fa262f4486c2da362" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.597999 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b33b-account-create-k9fvm" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.598048 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b33b-account-create-k9fvm" event={"ID":"4f2464d3-9ea7-482c-aa2b-f66d42141756","Type":"ContainerDied","Data":"90d01a1f10cda04e6512f996dac939f3febb40857a783b4c008d1565b86e6dc1"} Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.598077 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90d01a1f10cda04e6512f996dac939f3febb40857a783b4c008d1565b86e6dc1" Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.730342 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:07 crc kubenswrapper[4605]: W1001 14:04:07.735481 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9777c293_8ce8_4d82_a10a_b7d4c879227f.slice/crio-71b2ac3553fb7aacd9b498850bdfd6fd51dd5519253e67c8625bbc55d66aed71 WatchSource:0}: Error finding container 71b2ac3553fb7aacd9b498850bdfd6fd51dd5519253e67c8625bbc55d66aed71: Status 404 returned error can't find the container with id 71b2ac3553fb7aacd9b498850bdfd6fd51dd5519253e67c8625bbc55d66aed71 Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.771355 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:07 crc kubenswrapper[4605]: I1001 14:04:07.953807 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c601edbd-d09b-48b1-bc0b-40c01b76468c" path="/var/lib/kubelet/pods/c601edbd-d09b-48b1-bc0b-40c01b76468c/volumes" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.607057 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerStarted","Data":"da455ac9cd226fabc09211552beae3cb2f8ef1140b2ae5c97a6a6af89e6ccfe6"} Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.607478 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerStarted","Data":"71b2ac3553fb7aacd9b498850bdfd6fd51dd5519253e67c8625bbc55d66aed71"} Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.785379 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mfgwh"] Oct 01 14:04:08 crc kubenswrapper[4605]: E1001 14:04:08.785765 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f2464d3-9ea7-482c-aa2b-f66d42141756" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.785783 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f2464d3-9ea7-482c-aa2b-f66d42141756" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: E1001 14:04:08.785813 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc6e55f0-8554-4e08-a425-104179aecfd3" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.785821 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc6e55f0-8554-4e08-a425-104179aecfd3" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: E1001 14:04:08.785832 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce5e6eb-9c71-46d4-938d-707d1586936d" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 
14:04:08.785838 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce5e6eb-9c71-46d4-938d-707d1586936d" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.786002 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f2464d3-9ea7-482c-aa2b-f66d42141756" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.786023 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ce5e6eb-9c71-46d4-938d-707d1586936d" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.786037 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc6e55f0-8554-4e08-a425-104179aecfd3" containerName="mariadb-account-create" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.786608 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.788728 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.789042 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.789195 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-8sv9g" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.814807 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mfgwh"] Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.873201 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.873277 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-scripts\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.873342 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-config-data\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.873399 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xz66\" (UniqueName: \"kubernetes.io/projected/9a5c924f-a949-47cd-802a-4246716c3504-kube-api-access-2xz66\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.974586 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-scripts\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.974666 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-config-data\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.974718 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xz66\" (UniqueName: \"kubernetes.io/projected/9a5c924f-a949-47cd-802a-4246716c3504-kube-api-access-2xz66\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.974782 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.979651 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-scripts\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.979758 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.980025 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-config-data\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:08 crc kubenswrapper[4605]: I1001 14:04:08.996547 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xz66\" (UniqueName: \"kubernetes.io/projected/9a5c924f-a949-47cd-802a-4246716c3504-kube-api-access-2xz66\") pod \"nova-cell0-conductor-db-sync-mfgwh\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:09 crc kubenswrapper[4605]: I1001 14:04:09.113226 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:09 crc kubenswrapper[4605]: I1001 14:04:09.616376 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerStarted","Data":"4048d8bfd7dc62ae3af1da40c7bec3dd7f9f6e6f1a849974df8ef9269bd29cf6"} Oct 01 14:04:09 crc kubenswrapper[4605]: I1001 14:04:09.648598 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mfgwh"] Oct 01 14:04:09 crc kubenswrapper[4605]: W1001 14:04:09.649464 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a5c924f_a949_47cd_802a_4246716c3504.slice/crio-a6fd29e9c38743707a33b3b8c5e730ff6965e1555625f0d9bbd0f939dea0891c WatchSource:0}: Error finding container a6fd29e9c38743707a33b3b8c5e730ff6965e1555625f0d9bbd0f939dea0891c: Status 404 returned error can't find the container with id a6fd29e9c38743707a33b3b8c5e730ff6965e1555625f0d9bbd0f939dea0891c Oct 01 14:04:10 crc kubenswrapper[4605]: I1001 14:04:10.639164 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerStarted","Data":"a6612c1cea6041855931ce4a21fe891e602fc802dca30a69c3c06d168631ce46"} Oct 01 14:04:10 crc kubenswrapper[4605]: I1001 14:04:10.641297 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" event={"ID":"9a5c924f-a949-47cd-802a-4246716c3504","Type":"ContainerStarted","Data":"a6fd29e9c38743707a33b3b8c5e730ff6965e1555625f0d9bbd0f939dea0891c"} Oct 01 14:04:11 crc kubenswrapper[4605]: I1001 14:04:11.123033 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 14:04:11 crc kubenswrapper[4605]: I1001 14:04:11.123073 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 01 14:04:11 crc kubenswrapper[4605]: I1001 14:04:11.158835 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 14:04:11 crc kubenswrapper[4605]: I1001 14:04:11.187149 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 01 14:04:11 crc kubenswrapper[4605]: I1001 14:04:11.649386 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 14:04:11 crc kubenswrapper[4605]: I1001 14:04:11.649696 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 01 14:04:12 crc kubenswrapper[4605]: I1001 14:04:12.865992 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:12 crc kubenswrapper[4605]: I1001 14:04:12.866051 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:12 crc kubenswrapper[4605]: I1001 14:04:12.898674 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:12 crc kubenswrapper[4605]: I1001 14:04:12.952615 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:13 crc 
kubenswrapper[4605]: I1001 14:04:13.672616 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerStarted","Data":"a5aa3dfd4d194e27fc97d57d8739eced29be34d041c488b1b77efc800672f808"} Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.673151 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-notification-agent" containerID="cri-o://4048d8bfd7dc62ae3af1da40c7bec3dd7f9f6e6f1a849974df8ef9269bd29cf6" gracePeriod=30 Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.673172 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.673192 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.673204 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.673137 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="sg-core" containerID="cri-o://a6612c1cea6041855931ce4a21fe891e602fc802dca30a69c3c06d168631ce46" gracePeriod=30 Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.672644 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-central-agent" containerID="cri-o://da455ac9cd226fabc09211552beae3cb2f8ef1140b2ae5c97a6a6af89e6ccfe6" gracePeriod=30 Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.673118 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="proxy-httpd" containerID="cri-o://a5aa3dfd4d194e27fc97d57d8739eced29be34d041c488b1b77efc800672f808" gracePeriod=30 Oct 01 14:04:13 crc kubenswrapper[4605]: I1001 14:04:13.712113 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.247810273 podStartE2EDuration="7.712078372s" podCreationTimestamp="2025-10-01 14:04:06 +0000 UTC" firstStartedPulling="2025-10-01 14:04:07.737741756 +0000 UTC m=+1170.481717964" lastFinishedPulling="2025-10-01 14:04:13.202009855 +0000 UTC m=+1175.945986063" observedRunningTime="2025-10-01 14:04:13.69814933 +0000 UTC m=+1176.442125548" watchObservedRunningTime="2025-10-01 14:04:13.712078372 +0000 UTC m=+1176.456054580" Oct 01 14:04:14 crc kubenswrapper[4605]: I1001 14:04:14.693760 4605 generic.go:334] "Generic (PLEG): container finished" podID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerID="a5aa3dfd4d194e27fc97d57d8739eced29be34d041c488b1b77efc800672f808" exitCode=0 Oct 01 14:04:14 crc kubenswrapper[4605]: I1001 14:04:14.694049 4605 generic.go:334] "Generic (PLEG): container finished" podID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerID="a6612c1cea6041855931ce4a21fe891e602fc802dca30a69c3c06d168631ce46" exitCode=2 Oct 01 14:04:14 crc kubenswrapper[4605]: I1001 14:04:14.694056 4605 generic.go:334] "Generic (PLEG): container finished" podID="9777c293-8ce8-4d82-a10a-b7d4c879227f" 
containerID="4048d8bfd7dc62ae3af1da40c7bec3dd7f9f6e6f1a849974df8ef9269bd29cf6" exitCode=0 Oct 01 14:04:14 crc kubenswrapper[4605]: I1001 14:04:14.694254 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerDied","Data":"a5aa3dfd4d194e27fc97d57d8739eced29be34d041c488b1b77efc800672f808"} Oct 01 14:04:14 crc kubenswrapper[4605]: I1001 14:04:14.694292 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerDied","Data":"a6612c1cea6041855931ce4a21fe891e602fc802dca30a69c3c06d168631ce46"} Oct 01 14:04:14 crc kubenswrapper[4605]: I1001 14:04:14.694303 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerDied","Data":"4048d8bfd7dc62ae3af1da40c7bec3dd7f9f6e6f1a849974df8ef9269bd29cf6"} Oct 01 14:04:15 crc kubenswrapper[4605]: I1001 14:04:15.094019 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 14:04:15 crc kubenswrapper[4605]: I1001 14:04:15.094174 4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 14:04:15 crc kubenswrapper[4605]: I1001 14:04:15.259001 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 01 14:04:15 crc kubenswrapper[4605]: I1001 14:04:15.701247 4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 14:04:15 crc kubenswrapper[4605]: I1001 14:04:15.701514 4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 14:04:15 crc kubenswrapper[4605]: I1001 14:04:15.944107 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:16 crc kubenswrapper[4605]: I1001 14:04:16.711514 4605 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 14:04:16 crc kubenswrapper[4605]: I1001 14:04:16.871023 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 01 14:04:18 crc kubenswrapper[4605]: I1001 14:04:18.731054 4605 generic.go:334] "Generic (PLEG): container finished" podID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerID="da455ac9cd226fabc09211552beae3cb2f8ef1140b2ae5c97a6a6af89e6ccfe6" exitCode=0 Oct 01 14:04:18 crc kubenswrapper[4605]: I1001 14:04:18.731134 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerDied","Data":"da455ac9cd226fabc09211552beae3cb2f8ef1140b2ae5c97a6a6af89e6ccfe6"} Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.135577 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230612 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-run-httpd\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230651 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78q9p\" (UniqueName: \"kubernetes.io/projected/9777c293-8ce8-4d82-a10a-b7d4c879227f-kube-api-access-78q9p\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230738 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-sg-core-conf-yaml\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230815 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-log-httpd\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230848 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-config-data\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230912 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-combined-ca-bundle\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.230940 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-scripts\") pod \"9777c293-8ce8-4d82-a10a-b7d4c879227f\" (UID: \"9777c293-8ce8-4d82-a10a-b7d4c879227f\") " Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.231238 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.231404 4605 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.231594 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.237119 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-scripts" (OuterVolumeSpecName: "scripts") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.237750 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9777c293-8ce8-4d82-a10a-b7d4c879227f-kube-api-access-78q9p" (OuterVolumeSpecName: "kube-api-access-78q9p") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). InnerVolumeSpecName "kube-api-access-78q9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.273185 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.301460 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.332844 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.332891 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.332903 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78q9p\" (UniqueName: \"kubernetes.io/projected/9777c293-8ce8-4d82-a10a-b7d4c879227f-kube-api-access-78q9p\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.332912 4605 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.332920 4605 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9777c293-8ce8-4d82-a10a-b7d4c879227f-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.337843 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-config-data" (OuterVolumeSpecName: "config-data") pod "9777c293-8ce8-4d82-a10a-b7d4c879227f" (UID: "9777c293-8ce8-4d82-a10a-b7d4c879227f"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.433559 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9777c293-8ce8-4d82-a10a-b7d4c879227f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.631368 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.631428 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.631469 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.632240 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26228f282f385d65bcc8a30f3ba1b4954e3d59ec9adad591dd318d09c86924ce"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.632308 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://26228f282f385d65bcc8a30f3ba1b4954e3d59ec9adad591dd318d09c86924ce" gracePeriod=600 Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.764243 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.764623 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9777c293-8ce8-4d82-a10a-b7d4c879227f","Type":"ContainerDied","Data":"71b2ac3553fb7aacd9b498850bdfd6fd51dd5519253e67c8625bbc55d66aed71"} Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.764673 4605 scope.go:117] "RemoveContainer" containerID="a5aa3dfd4d194e27fc97d57d8739eced29be34d041c488b1b77efc800672f808" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.778857 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" event={"ID":"9a5c924f-a949-47cd-802a-4246716c3504","Type":"ContainerStarted","Data":"188a0f09b5cb4e548b3d2465a56eb8af13bfc7e1f35bc11b09668440c410db3e"} Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.806198 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" podStartSLOduration=2.585915096 podStartE2EDuration="13.806179224s" podCreationTimestamp="2025-10-01 14:04:08 +0000 UTC" firstStartedPulling="2025-10-01 14:04:09.651451081 +0000 UTC m=+1172.395427289" lastFinishedPulling="2025-10-01 14:04:20.871715209 +0000 UTC m=+1183.615691417" observedRunningTime="2025-10-01 14:04:21.805679672 +0000 UTC m=+1184.549655890" watchObservedRunningTime="2025-10-01 14:04:21.806179224 +0000 UTC m=+1184.550155432" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.817251 4605 scope.go:117] "RemoveContainer" containerID="a6612c1cea6041855931ce4a21fe891e602fc802dca30a69c3c06d168631ce46" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.838160 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.856161 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.856825 4605 scope.go:117] "RemoveContainer" containerID="4048d8bfd7dc62ae3af1da40c7bec3dd7f9f6e6f1a849974df8ef9269bd29cf6" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863302 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:21 crc kubenswrapper[4605]: E1001 14:04:21.863707 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-central-agent" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863720 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-central-agent" Oct 01 14:04:21 crc kubenswrapper[4605]: E1001 14:04:21.863739 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="proxy-httpd" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863745 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="proxy-httpd" Oct 01 14:04:21 crc kubenswrapper[4605]: E1001 14:04:21.863769 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-notification-agent" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863775 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-notification-agent" Oct 01 14:04:21 crc kubenswrapper[4605]: 
E1001 14:04:21.863784 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="sg-core" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863790 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="sg-core" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863951 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-notification-agent" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863961 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="proxy-httpd" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863978 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="sg-core" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.863995 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" containerName="ceilometer-central-agent" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.867973 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.871174 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.882453 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.891264 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.942577 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9777c293-8ce8-4d82-a10a-b7d4c879227f" path="/var/lib/kubelet/pods/9777c293-8ce8-4d82-a10a-b7d4c879227f/volumes" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.942669 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.943520 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-scripts\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.943580 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6ks5\" (UniqueName: \"kubernetes.io/projected/26405c84-df24-41dc-ae5f-e3146954b888-kube-api-access-p6ks5\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.943642 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-log-httpd\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" 
Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.943727 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-config-data\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.943765 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-run-httpd\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.943902 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:21 crc kubenswrapper[4605]: I1001 14:04:21.944783 4605 scope.go:117] "RemoveContainer" containerID="da455ac9cd226fabc09211552beae3cb2f8ef1140b2ae5c97a6a6af89e6ccfe6" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046107 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046181 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-scripts\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046214 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6ks5\" (UniqueName: \"kubernetes.io/projected/26405c84-df24-41dc-ae5f-e3146954b888-kube-api-access-p6ks5\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046243 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-log-httpd\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046275 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-config-data\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046296 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-run-httpd\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.046341 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.047316 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-log-httpd\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.047682 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-run-httpd\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.050793 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-scripts\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.053130 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.064939 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-config-data\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.065986 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.067993 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6ks5\" (UniqueName: \"kubernetes.io/projected/26405c84-df24-41dc-ae5f-e3146954b888-kube-api-access-p6ks5\") pod \"ceilometer-0\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.252538 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.740342 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.787622 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerStarted","Data":"246c665414a18d9388a87a5f08f31c04b896b623d5a85be8080f46b07b1235de"} Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.790353 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="26228f282f385d65bcc8a30f3ba1b4954e3d59ec9adad591dd318d09c86924ce" exitCode=0 Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.790450 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"26228f282f385d65bcc8a30f3ba1b4954e3d59ec9adad591dd318d09c86924ce"} Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.790479 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"31f8ad2782362fc773c73e454aeec697a35f4e0956b1bf4d85878b45beec465b"} Oct 01 14:04:22 crc kubenswrapper[4605]: I1001 14:04:22.790497 4605 scope.go:117] "RemoveContainer" containerID="1f69fd4c8b7ea593079cde275a0913d46f3db4c2d1ad72f22e5ac983a6cab564" Oct 01 14:04:23 crc kubenswrapper[4605]: I1001 14:04:23.805724 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerStarted","Data":"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65"} Oct 01 14:04:24 crc kubenswrapper[4605]: I1001 14:04:24.821183 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerStarted","Data":"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7"} Oct 01 14:04:25 crc kubenswrapper[4605]: I1001 14:04:25.832751 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerStarted","Data":"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6"} Oct 01 14:04:28 crc kubenswrapper[4605]: I1001 14:04:28.886123 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerStarted","Data":"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e"} Oct 01 14:04:28 crc kubenswrapper[4605]: I1001 14:04:28.886603 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 14:04:28 crc kubenswrapper[4605]: I1001 14:04:28.929328 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.385552779 podStartE2EDuration="7.929310358s" podCreationTimestamp="2025-10-01 14:04:21 +0000 UTC" firstStartedPulling="2025-10-01 14:04:22.758447061 +0000 UTC m=+1185.502423269" lastFinishedPulling="2025-10-01 14:04:28.30220464 +0000 UTC m=+1191.046180848" observedRunningTime="2025-10-01 14:04:28.912225876 +0000 UTC m=+1191.656202164" watchObservedRunningTime="2025-10-01 14:04:28.929310358 +0000 
UTC m=+1191.673286576" Oct 01 14:04:32 crc kubenswrapper[4605]: I1001 14:04:32.921840 4605 generic.go:334] "Generic (PLEG): container finished" podID="9a5c924f-a949-47cd-802a-4246716c3504" containerID="188a0f09b5cb4e548b3d2465a56eb8af13bfc7e1f35bc11b09668440c410db3e" exitCode=0 Oct 01 14:04:32 crc kubenswrapper[4605]: I1001 14:04:32.921949 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" event={"ID":"9a5c924f-a949-47cd-802a-4246716c3504","Type":"ContainerDied","Data":"188a0f09b5cb4e548b3d2465a56eb8af13bfc7e1f35bc11b09668440c410db3e"} Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.356867 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.391073 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xz66\" (UniqueName: \"kubernetes.io/projected/9a5c924f-a949-47cd-802a-4246716c3504-kube-api-access-2xz66\") pod \"9a5c924f-a949-47cd-802a-4246716c3504\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.391192 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-scripts\") pod \"9a5c924f-a949-47cd-802a-4246716c3504\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.391308 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-config-data\") pod \"9a5c924f-a949-47cd-802a-4246716c3504\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.392285 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-combined-ca-bundle\") pod \"9a5c924f-a949-47cd-802a-4246716c3504\" (UID: \"9a5c924f-a949-47cd-802a-4246716c3504\") " Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.398479 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-scripts" (OuterVolumeSpecName: "scripts") pod "9a5c924f-a949-47cd-802a-4246716c3504" (UID: "9a5c924f-a949-47cd-802a-4246716c3504"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.400315 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a5c924f-a949-47cd-802a-4246716c3504-kube-api-access-2xz66" (OuterVolumeSpecName: "kube-api-access-2xz66") pod "9a5c924f-a949-47cd-802a-4246716c3504" (UID: "9a5c924f-a949-47cd-802a-4246716c3504"). InnerVolumeSpecName "kube-api-access-2xz66". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.417472 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a5c924f-a949-47cd-802a-4246716c3504" (UID: "9a5c924f-a949-47cd-802a-4246716c3504"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.434267 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-config-data" (OuterVolumeSpecName: "config-data") pod "9a5c924f-a949-47cd-802a-4246716c3504" (UID: "9a5c924f-a949-47cd-802a-4246716c3504"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.494581 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xz66\" (UniqueName: \"kubernetes.io/projected/9a5c924f-a949-47cd-802a-4246716c3504-kube-api-access-2xz66\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.494616 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.494628 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.494637 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a5c924f-a949-47cd-802a-4246716c3504-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.951264 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" event={"ID":"9a5c924f-a949-47cd-802a-4246716c3504","Type":"ContainerDied","Data":"a6fd29e9c38743707a33b3b8c5e730ff6965e1555625f0d9bbd0f939dea0891c"} Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.951517 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6fd29e9c38743707a33b3b8c5e730ff6965e1555625f0d9bbd0f939dea0891c" Oct 01 14:04:34 crc kubenswrapper[4605]: I1001 14:04:34.951640 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mfgwh" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.069201 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 14:04:35 crc kubenswrapper[4605]: E1001 14:04:35.069947 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a5c924f-a949-47cd-802a-4246716c3504" containerName="nova-cell0-conductor-db-sync" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.070035 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a5c924f-a949-47cd-802a-4246716c3504" containerName="nova-cell0-conductor-db-sync" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.070367 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a5c924f-a949-47cd-802a-4246716c3504" containerName="nova-cell0-conductor-db-sync" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.071011 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.073759 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-8sv9g" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.076765 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.101029 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.109408 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c93e48d5-b3b1-4390-bb46-308151d80e4e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.109519 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c93e48d5-b3b1-4390-bb46-308151d80e4e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.109610 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g29cp\" (UniqueName: \"kubernetes.io/projected/c93e48d5-b3b1-4390-bb46-308151d80e4e-kube-api-access-g29cp\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.210414 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g29cp\" (UniqueName: \"kubernetes.io/projected/c93e48d5-b3b1-4390-bb46-308151d80e4e-kube-api-access-g29cp\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.210855 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c93e48d5-b3b1-4390-bb46-308151d80e4e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.210950 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c93e48d5-b3b1-4390-bb46-308151d80e4e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.216192 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c93e48d5-b3b1-4390-bb46-308151d80e4e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.233717 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c93e48d5-b3b1-4390-bb46-308151d80e4e-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.237592 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g29cp\" (UniqueName: \"kubernetes.io/projected/c93e48d5-b3b1-4390-bb46-308151d80e4e-kube-api-access-g29cp\") pod \"nova-cell0-conductor-0\" (UID: \"c93e48d5-b3b1-4390-bb46-308151d80e4e\") " pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.394463 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.847795 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 01 14:04:35 crc kubenswrapper[4605]: I1001 14:04:35.966717 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c93e48d5-b3b1-4390-bb46-308151d80e4e","Type":"ContainerStarted","Data":"4e29424580fd27711cee767351366817fa54cef77308a47bcc449e2509d838cb"} Oct 01 14:04:36 crc kubenswrapper[4605]: I1001 14:04:36.984313 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c93e48d5-b3b1-4390-bb46-308151d80e4e","Type":"ContainerStarted","Data":"688a4a92df38dfaada004dc8abff58585138ab3d5a5ad6964900804fedd61c3b"} Oct 01 14:04:36 crc kubenswrapper[4605]: I1001 14:04:36.985475 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:37 crc kubenswrapper[4605]: I1001 14:04:37.004025 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.00400233 podStartE2EDuration="2.00400233s" podCreationTimestamp="2025-10-01 14:04:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:04:36.997762852 +0000 UTC m=+1199.741739070" watchObservedRunningTime="2025-10-01 14:04:37.00400233 +0000 UTC m=+1199.747978538" Oct 01 14:04:45 crc kubenswrapper[4605]: I1001 14:04:45.425480 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 01 14:04:45 crc kubenswrapper[4605]: I1001 14:04:45.944887 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-fh5rm"] Oct 01 14:04:45 crc kubenswrapper[4605]: I1001 14:04:45.947269 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:45 crc kubenswrapper[4605]: I1001 14:04:45.950896 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fh5rm"] Oct 01 14:04:45 crc kubenswrapper[4605]: I1001 14:04:45.956058 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 01 14:04:45 crc kubenswrapper[4605]: I1001 14:04:45.958788 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.021132 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.021258 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5rqt\" (UniqueName: \"kubernetes.io/projected/44883e34-65b9-4eef-a5ae-75d88804c94f-kube-api-access-l5rqt\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.021306 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-config-data\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.021325 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-scripts\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.107439 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.116173 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.123385 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-config-data\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.123469 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-scripts\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.123617 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.123864 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5rqt\" (UniqueName: \"kubernetes.io/projected/44883e34-65b9-4eef-a5ae-75d88804c94f-kube-api-access-l5rqt\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.126891 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.141945 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-scripts\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.142991 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.146071 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.157900 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-config-data\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.225156 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp94w\" (UniqueName: \"kubernetes.io/projected/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-kube-api-access-tp94w\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.225204 4605 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.225269 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.227691 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5rqt\" (UniqueName: \"kubernetes.io/projected/44883e34-65b9-4eef-a5ae-75d88804c94f-kube-api-access-l5rqt\") pod \"nova-cell0-cell-mapping-fh5rm\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.283830 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.293745 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.294661 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.304469 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327066 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp94w\" (UniqueName: \"kubernetes.io/projected/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-kube-api-access-tp94w\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327122 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327148 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhn2f\" (UniqueName: \"kubernetes.io/projected/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-kube-api-access-nhn2f\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327189 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-config-data\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327226 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327242 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.327280 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-logs\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.330851 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.352357 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.384627 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.412546 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp94w\" (UniqueName: \"kubernetes.io/projected/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-kube-api-access-tp94w\") pod \"nova-scheduler-0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.428990 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-config-data\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.429045 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.429087 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-logs\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.429180 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhn2f\" (UniqueName: \"kubernetes.io/projected/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-kube-api-access-nhn2f\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc 
kubenswrapper[4605]: I1001 14:04:46.429816 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-logs\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.445420 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-config-data\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.452362 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.494670 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhn2f\" (UniqueName: \"kubernetes.io/projected/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-kube-api-access-nhn2f\") pod \"nova-metadata-0\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.528829 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.536427 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.544525 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.546310 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.564410 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.600492 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.611896 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.634054 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.637254 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-llpx4"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.638781 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.674268 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.739503 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-llpx4"] Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741242 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741268 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xj9w\" (UniqueName: \"kubernetes.io/projected/272aa35e-0ea5-43bc-996a-d3f010dc94ac-kube-api-access-5xj9w\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741292 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741316 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl949\" (UniqueName: \"kubernetes.io/projected/0423df9d-25a8-45f3-9a48-f8f01d526f37-kube-api-access-jl949\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741340 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-config-data\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741366 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741390 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-config\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741411 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75wqd\" (UniqueName: \"kubernetes.io/projected/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-kube-api-access-75wqd\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " 
pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741446 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741476 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741505 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741528 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0423df9d-25a8-45f3-9a48-f8f01d526f37-logs\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.741553 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.750912 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843135 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843182 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xj9w\" (UniqueName: \"kubernetes.io/projected/272aa35e-0ea5-43bc-996a-d3f010dc94ac-kube-api-access-5xj9w\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843202 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843221 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl949\" (UniqueName: \"kubernetes.io/projected/0423df9d-25a8-45f3-9a48-f8f01d526f37-kube-api-access-jl949\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843240 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-config-data\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843264 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843285 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-config\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843302 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75wqd\" (UniqueName: \"kubernetes.io/projected/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-kube-api-access-75wqd\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843328 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843355 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843382 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843399 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0423df9d-25a8-45f3-9a48-f8f01d526f37-logs\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.843417 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.844371 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.844480 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.847518 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.848322 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-config\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.848667 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0423df9d-25a8-45f3-9a48-f8f01d526f37-logs\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.848667 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " 
pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.864723 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.864800 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-config-data\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.865999 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.868153 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.872652 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75wqd\" (UniqueName: \"kubernetes.io/projected/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-kube-api-access-75wqd\") pod \"dnsmasq-dns-845d6d6f59-llpx4\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.875584 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xj9w\" (UniqueName: \"kubernetes.io/projected/272aa35e-0ea5-43bc-996a-d3f010dc94ac-kube-api-access-5xj9w\") pod \"nova-cell1-novncproxy-0\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:46 crc kubenswrapper[4605]: I1001 14:04:46.884729 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl949\" (UniqueName: \"kubernetes.io/projected/0423df9d-25a8-45f3-9a48-f8f01d526f37-kube-api-access-jl949\") pod \"nova-api-0\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " pod="openstack/nova-api-0" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.047829 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.067600 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.070963 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fh5rm"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.080405 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.308514 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.490152 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.785299 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-78hpx"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.787020 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.792485 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.792613 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.831247 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:04:47 crc kubenswrapper[4605]: W1001 14:04:47.839850 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44a71971_a13c_48ae_b60f_e0fb01c4a7f9.slice/crio-848bb64d32647c3f17013a7b172fb7040018d4b78054df108ec1859cd1535035 WatchSource:0}: Error finding container 848bb64d32647c3f17013a7b172fb7040018d4b78054df108ec1859cd1535035: Status 404 returned error can't find the container with id 848bb64d32647c3f17013a7b172fb7040018d4b78054df108ec1859cd1535035 Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.848431 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-78hpx"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.868013 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-llpx4"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.871940 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.872245 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w92dr\" (UniqueName: \"kubernetes.io/projected/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-kube-api-access-w92dr\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.872352 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-scripts\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.872575 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-config-data\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.970891 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.979851 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.979943 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w92dr\" (UniqueName: \"kubernetes.io/projected/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-kube-api-access-w92dr\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.979989 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-scripts\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:47 crc kubenswrapper[4605]: I1001 14:04:47.980084 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-config-data\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.007844 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-scripts\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.009576 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-config-data\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.023750 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.025786 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w92dr\" (UniqueName: \"kubernetes.io/projected/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-kube-api-access-w92dr\") pod \"nova-cell1-conductor-db-sync-78hpx\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 
14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.117869 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.139692 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"272aa35e-0ea5-43bc-996a-d3f010dc94ac","Type":"ContainerStarted","Data":"1dc90f68148ccdc97e44c85deab406bbc81f8c5636dc5aceabe7f6174ae53149"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.253349 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fh5rm" event={"ID":"44883e34-65b9-4eef-a5ae-75d88804c94f","Type":"ContainerStarted","Data":"79d8c909cce78c5a584cf96f0aac7f210acfc1fdbcc47a9252a30626b2e2a6d2"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.253414 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fh5rm" event={"ID":"44883e34-65b9-4eef-a5ae-75d88804c94f","Type":"ContainerStarted","Data":"ff1c5f11ec6b079d9d918cd2db7e09697ea21622718723ecb3e43254ca64c600"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.265998 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c","Type":"ContainerStarted","Data":"6a31bba209da85f1d37f251b9b9d3e3893c820a479cb655b193f06cea9885b1f"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.273191 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0423df9d-25a8-45f3-9a48-f8f01d526f37","Type":"ContainerStarted","Data":"1115ed16bdff5d010b3322aebf07f6c887d985fa8fea9c2d0d1fae7229a4b6a3"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.275237 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0","Type":"ContainerStarted","Data":"46358cd9b87c25b4deb2e2ad16aa5d63b5dfc3f5db01dc4181a42b844de08321"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.280172 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" event={"ID":"44a71971-a13c-48ae-b60f-e0fb01c4a7f9","Type":"ContainerStarted","Data":"848bb64d32647c3f17013a7b172fb7040018d4b78054df108ec1859cd1535035"} Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.294569 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-fh5rm" podStartSLOduration=3.294549505 podStartE2EDuration="3.294549505s" podCreationTimestamp="2025-10-01 14:04:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:04:48.292867163 +0000 UTC m=+1211.036843371" watchObservedRunningTime="2025-10-01 14:04:48.294549505 +0000 UTC m=+1211.038525713" Oct 01 14:04:48 crc kubenswrapper[4605]: I1001 14:04:48.769329 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-78hpx"] Oct 01 14:04:48 crc kubenswrapper[4605]: W1001 14:04:48.822387 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d1195cd_cd23_4f48_a14a_f2ee21a0c0e2.slice/crio-3cba051c2f61caaaf0319888838db4ece69aa2a290a5dcad88926a16372ed992 WatchSource:0}: Error finding container 3cba051c2f61caaaf0319888838db4ece69aa2a290a5dcad88926a16372ed992: Status 404 returned error can't find the container 
with id 3cba051c2f61caaaf0319888838db4ece69aa2a290a5dcad88926a16372ed992 Oct 01 14:04:49 crc kubenswrapper[4605]: I1001 14:04:49.323170 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-78hpx" event={"ID":"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2","Type":"ContainerStarted","Data":"409e3f3177d2af1bb27ec05ffd965bc3ec8187272d7ca9d0028809254304f380"} Oct 01 14:04:49 crc kubenswrapper[4605]: I1001 14:04:49.323410 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-78hpx" event={"ID":"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2","Type":"ContainerStarted","Data":"3cba051c2f61caaaf0319888838db4ece69aa2a290a5dcad88926a16372ed992"} Oct 01 14:04:49 crc kubenswrapper[4605]: I1001 14:04:49.329408 4605 generic.go:334] "Generic (PLEG): container finished" podID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerID="2288a5677210c3bebf2c134d0b5a6afe678df817037887e1305f37324afb2299" exitCode=0 Oct 01 14:04:49 crc kubenswrapper[4605]: I1001 14:04:49.330857 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" event={"ID":"44a71971-a13c-48ae-b60f-e0fb01c4a7f9","Type":"ContainerDied","Data":"2288a5677210c3bebf2c134d0b5a6afe678df817037887e1305f37324afb2299"} Oct 01 14:04:49 crc kubenswrapper[4605]: I1001 14:04:49.342322 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-78hpx" podStartSLOduration=2.342303658 podStartE2EDuration="2.342303658s" podCreationTimestamp="2025-10-01 14:04:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:04:49.34121895 +0000 UTC m=+1212.085195158" watchObservedRunningTime="2025-10-01 14:04:49.342303658 +0000 UTC m=+1212.086279866" Oct 01 14:04:50 crc kubenswrapper[4605]: I1001 14:04:50.867148 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:04:50 crc kubenswrapper[4605]: I1001 14:04:50.902929 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.257931 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.374639 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"272aa35e-0ea5-43bc-996a-d3f010dc94ac","Type":"ContainerStarted","Data":"4cc13cd50b58ff7ceb886e971b88a8db673aba466c79ec59bdeeeaa9e14afcf0"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.374767 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="272aa35e-0ea5-43bc-996a-d3f010dc94ac" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://4cc13cd50b58ff7ceb886e971b88a8db673aba466c79ec59bdeeeaa9e14afcf0" gracePeriod=30 Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.378575 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c","Type":"ContainerStarted","Data":"e63a9c9fa0be55ac8f9bf99febd858d93a221af5fe4caed4032d9a8fb8c6984f"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.378618 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c","Type":"ContainerStarted","Data":"048bfa2a9b573d02c9a76af2b4713db677ff87697f741183e2a1c33999f6ad28"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.378733 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-log" containerID="cri-o://048bfa2a9b573d02c9a76af2b4713db677ff87697f741183e2a1c33999f6ad28" gracePeriod=30 Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.378824 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-metadata" containerID="cri-o://e63a9c9fa0be55ac8f9bf99febd858d93a221af5fe4caed4032d9a8fb8c6984f" gracePeriod=30 Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.384334 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0423df9d-25a8-45f3-9a48-f8f01d526f37","Type":"ContainerStarted","Data":"57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.384393 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0423df9d-25a8-45f3-9a48-f8f01d526f37","Type":"ContainerStarted","Data":"f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.392323 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0","Type":"ContainerStarted","Data":"4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.403815 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.608494979 podStartE2EDuration="6.403793536s" podCreationTimestamp="2025-10-01 14:04:46 +0000 UTC" firstStartedPulling="2025-10-01 14:04:47.802771681 +0000 UTC m=+1210.546747889" lastFinishedPulling="2025-10-01 14:04:51.598070238 +0000 UTC m=+1214.342046446" observedRunningTime="2025-10-01 14:04:52.397032025 +0000 UTC m=+1215.141008233" watchObservedRunningTime="2025-10-01 14:04:52.403793536 +0000 UTC m=+1215.147769744" Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.413528 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" event={"ID":"44a71971-a13c-48ae-b60f-e0fb01c4a7f9","Type":"ContainerStarted","Data":"0b7f92f09775f039e8efb146ca61866701e3a075f2a33935a753725fa9744c1b"} Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.414557 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.431453 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.369022449 podStartE2EDuration="6.431430915s" podCreationTimestamp="2025-10-01 14:04:46 +0000 UTC" firstStartedPulling="2025-10-01 14:04:47.539739495 +0000 UTC m=+1210.283715703" lastFinishedPulling="2025-10-01 14:04:51.602147961 +0000 UTC m=+1214.346124169" observedRunningTime="2025-10-01 14:04:52.425163847 +0000 UTC m=+1215.169140065" watchObservedRunningTime="2025-10-01 14:04:52.431430915 +0000 UTC m=+1215.175407123" Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.498861 4605 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.9133626440000002 podStartE2EDuration="6.498833731s" podCreationTimestamp="2025-10-01 14:04:46 +0000 UTC" firstStartedPulling="2025-10-01 14:04:48.012642002 +0000 UTC m=+1210.756618210" lastFinishedPulling="2025-10-01 14:04:51.598113089 +0000 UTC m=+1214.342089297" observedRunningTime="2025-10-01 14:04:52.45926048 +0000 UTC m=+1215.203236688" watchObservedRunningTime="2025-10-01 14:04:52.498833731 +0000 UTC m=+1215.242809939" Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.499485 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.254391998 podStartE2EDuration="6.499480517s" podCreationTimestamp="2025-10-01 14:04:46 +0000 UTC" firstStartedPulling="2025-10-01 14:04:47.357031642 +0000 UTC m=+1210.101007850" lastFinishedPulling="2025-10-01 14:04:51.602120161 +0000 UTC m=+1214.346096369" observedRunningTime="2025-10-01 14:04:52.488014007 +0000 UTC m=+1215.231990215" watchObservedRunningTime="2025-10-01 14:04:52.499480517 +0000 UTC m=+1215.243456725" Oct 01 14:04:52 crc kubenswrapper[4605]: I1001 14:04:52.536838 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" podStartSLOduration=6.536821412 podStartE2EDuration="6.536821412s" podCreationTimestamp="2025-10-01 14:04:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:04:52.528669246 +0000 UTC m=+1215.272645454" watchObservedRunningTime="2025-10-01 14:04:52.536821412 +0000 UTC m=+1215.280797620" Oct 01 14:04:53 crc kubenswrapper[4605]: I1001 14:04:53.424781 4605 generic.go:334] "Generic (PLEG): container finished" podID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerID="048bfa2a9b573d02c9a76af2b4713db677ff87697f741183e2a1c33999f6ad28" exitCode=143 Oct 01 14:04:53 crc kubenswrapper[4605]: I1001 14:04:53.425910 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c","Type":"ContainerDied","Data":"048bfa2a9b573d02c9a76af2b4713db677ff87697f741183e2a1c33999f6ad28"} Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.515952 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.516724 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" containerName="kube-state-metrics" containerID="cri-o://c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4" gracePeriod=30 Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.529955 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.530000 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.561076 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.754831 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.754877 4605 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 14:04:56 crc kubenswrapper[4605]: I1001 14:04:56.998084 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.048565 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.048620 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.069198 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.080079 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcvhs\" (UniqueName: \"kubernetes.io/projected/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed-kube-api-access-mcvhs\") pod \"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed\" (UID: \"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed\") " Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.082325 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.118641 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed-kube-api-access-mcvhs" (OuterVolumeSpecName: "kube-api-access-mcvhs") pod "fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" (UID: "fd313e13-49ce-49c2-be82-6ca3c0fbb2ed"). InnerVolumeSpecName "kube-api-access-mcvhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.183342 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcvhs\" (UniqueName: \"kubernetes.io/projected/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed-kube-api-access-mcvhs\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.312121 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-8rr76"] Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.312353 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerName="dnsmasq-dns" containerID="cri-o://1f76b559baad8105afd4645aaa68235d885c9fefea55e231d8b64126ae9cf55c" gracePeriod=10 Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.493408 4605 generic.go:334] "Generic (PLEG): container finished" podID="44883e34-65b9-4eef-a5ae-75d88804c94f" containerID="79d8c909cce78c5a584cf96f0aac7f210acfc1fdbcc47a9252a30626b2e2a6d2" exitCode=0 Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.493468 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fh5rm" event={"ID":"44883e34-65b9-4eef-a5ae-75d88804c94f","Type":"ContainerDied","Data":"79d8c909cce78c5a584cf96f0aac7f210acfc1fdbcc47a9252a30626b2e2a6d2"} Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.496290 4605 generic.go:334] "Generic (PLEG): container finished" podID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerID="1f76b559baad8105afd4645aaa68235d885c9fefea55e231d8b64126ae9cf55c" exitCode=0 Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.496378 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-5784cf869f-8rr76" event={"ID":"ed331d86-1f29-4a49-a436-2e7f9c55a2d0","Type":"ContainerDied","Data":"1f76b559baad8105afd4645aaa68235d885c9fefea55e231d8b64126ae9cf55c"} Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.504502 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" containerID="c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4" exitCode=2 Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.505397 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.513941 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed","Type":"ContainerDied","Data":"c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4"} Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.514032 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd313e13-49ce-49c2-be82-6ca3c0fbb2ed","Type":"ContainerDied","Data":"248e947d7551ec9bb605c4965c1a5db19a754bb3e3c076a6302fee08cafd4c8e"} Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.514110 4605 scope.go:117] "RemoveContainer" containerID="c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.581986 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.599280 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.599741 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.606565 4605 scope.go:117] "RemoveContainer" containerID="c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4" Oct 01 14:04:57 crc kubenswrapper[4605]: E1001 14:04:57.610895 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4\": container with ID starting with c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4 not found: ID does not exist" containerID="c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.610932 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4"} err="failed to get container status \"c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4\": rpc error: code = NotFound desc = could not find container \"c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4\": container with ID starting with c86da58e244c9ce37241f58f84c24a81ed6db6946557e5d54be22c53fc35b6c4 not found: ID does not exist" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.682470 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:04:57 crc kubenswrapper[4605]: E1001 14:04:57.706636 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" containerName="kube-state-metrics" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 
14:04:57.706660 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" containerName="kube-state-metrics" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.712367 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" containerName="kube-state-metrics" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.713230 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.716615 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.722134 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.722208 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.797284 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.797487 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.797575 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcm7g\" (UniqueName: \"kubernetes.io/projected/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-api-access-kcm7g\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.797653 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.899213 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.899294 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.899334 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-kcm7g\" (UniqueName: \"kubernetes.io/projected/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-api-access-kcm7g\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.899397 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.912995 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.913639 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.917551 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.925687 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcm7g\" (UniqueName: \"kubernetes.io/projected/d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea-kube-api-access-kcm7g\") pod \"kube-state-metrics-0\" (UID: \"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea\") " pod="openstack/kube-state-metrics-0" Oct 01 14:04:57 crc kubenswrapper[4605]: I1001 14:04:57.945923 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd313e13-49ce-49c2-be82-6ca3c0fbb2ed" path="/var/lib/kubelet/pods/fd313e13-49ce-49c2-be82-6ca3c0fbb2ed/volumes" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.026640 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.077542 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.149456 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.149596 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.204827 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj4qd\" (UniqueName: \"kubernetes.io/projected/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-kube-api-access-dj4qd\") pod \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.204902 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-nb\") pod \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.204948 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-svc\") pod \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.205021 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-swift-storage-0\") pod \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.205153 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-sb\") pod \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.205227 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-config\") pod \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\" (UID: \"ed331d86-1f29-4a49-a436-2e7f9c55a2d0\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.214426 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-kube-api-access-dj4qd" (OuterVolumeSpecName: "kube-api-access-dj4qd") pod "ed331d86-1f29-4a49-a436-2e7f9c55a2d0" (UID: "ed331d86-1f29-4a49-a436-2e7f9c55a2d0"). InnerVolumeSpecName "kube-api-access-dj4qd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.250539 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ed331d86-1f29-4a49-a436-2e7f9c55a2d0" (UID: "ed331d86-1f29-4a49-a436-2e7f9c55a2d0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.284248 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ed331d86-1f29-4a49-a436-2e7f9c55a2d0" (UID: "ed331d86-1f29-4a49-a436-2e7f9c55a2d0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.290289 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-config" (OuterVolumeSpecName: "config") pod "ed331d86-1f29-4a49-a436-2e7f9c55a2d0" (UID: "ed331d86-1f29-4a49-a436-2e7f9c55a2d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.307989 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed331d86-1f29-4a49-a436-2e7f9c55a2d0" (UID: "ed331d86-1f29-4a49-a436-2e7f9c55a2d0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.311476 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.311499 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj4qd\" (UniqueName: \"kubernetes.io/projected/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-kube-api-access-dj4qd\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.311510 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.311519 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.311528 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.343248 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ed331d86-1f29-4a49-a436-2e7f9c55a2d0" (UID: "ed331d86-1f29-4a49-a436-2e7f9c55a2d0"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.412869 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed331d86-1f29-4a49-a436-2e7f9c55a2d0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.522530 4605 generic.go:334] "Generic (PLEG): container finished" podID="6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" containerID="409e3f3177d2af1bb27ec05ffd965bc3ec8187272d7ca9d0028809254304f380" exitCode=0 Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.522590 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-78hpx" event={"ID":"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2","Type":"ContainerDied","Data":"409e3f3177d2af1bb27ec05ffd965bc3ec8187272d7ca9d0028809254304f380"} Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.526528 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.527615 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-8rr76" event={"ID":"ed331d86-1f29-4a49-a436-2e7f9c55a2d0","Type":"ContainerDied","Data":"489e42d187559bd90aa03ddd16375449367c120a8eafbe582b68c138dbe9768c"} Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.527656 4605 scope.go:117] "RemoveContainer" containerID="1f76b559baad8105afd4645aaa68235d885c9fefea55e231d8b64126ae9cf55c" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.564035 4605 scope.go:117] "RemoveContainer" containerID="bcbb6bf16b4baee0c0b23c4457ed083a5446b54045ff1072aa2bc1130eb7992b" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.585611 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-8rr76"] Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.622027 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-8rr76"] Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.648915 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.817645 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.920528 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5rqt\" (UniqueName: \"kubernetes.io/projected/44883e34-65b9-4eef-a5ae-75d88804c94f-kube-api-access-l5rqt\") pod \"44883e34-65b9-4eef-a5ae-75d88804c94f\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.920690 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-config-data\") pod \"44883e34-65b9-4eef-a5ae-75d88804c94f\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.920775 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-scripts\") pod \"44883e34-65b9-4eef-a5ae-75d88804c94f\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.920954 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-combined-ca-bundle\") pod \"44883e34-65b9-4eef-a5ae-75d88804c94f\" (UID: \"44883e34-65b9-4eef-a5ae-75d88804c94f\") " Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.928437 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44883e34-65b9-4eef-a5ae-75d88804c94f-kube-api-access-l5rqt" (OuterVolumeSpecName: "kube-api-access-l5rqt") pod "44883e34-65b9-4eef-a5ae-75d88804c94f" (UID: "44883e34-65b9-4eef-a5ae-75d88804c94f"). InnerVolumeSpecName "kube-api-access-l5rqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.928691 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-scripts" (OuterVolumeSpecName: "scripts") pod "44883e34-65b9-4eef-a5ae-75d88804c94f" (UID: "44883e34-65b9-4eef-a5ae-75d88804c94f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.959234 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-config-data" (OuterVolumeSpecName: "config-data") pod "44883e34-65b9-4eef-a5ae-75d88804c94f" (UID: "44883e34-65b9-4eef-a5ae-75d88804c94f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:58 crc kubenswrapper[4605]: I1001 14:04:58.960035 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44883e34-65b9-4eef-a5ae-75d88804c94f" (UID: "44883e34-65b9-4eef-a5ae-75d88804c94f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.023606 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.023844 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5rqt\" (UniqueName: \"kubernetes.io/projected/44883e34-65b9-4eef-a5ae-75d88804c94f-kube-api-access-l5rqt\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.023907 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.023964 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44883e34-65b9-4eef-a5ae-75d88804c94f-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.272895 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.295848 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.296068 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-log" containerID="cri-o://f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98" gracePeriod=30 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.296212 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-api" containerID="cri-o://57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31" gracePeriod=30 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.535774 4605 generic.go:334] "Generic (PLEG): container finished" podID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerID="f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98" exitCode=143 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.535864 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0423df9d-25a8-45f3-9a48-f8f01d526f37","Type":"ContainerDied","Data":"f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98"} Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.537513 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea","Type":"ContainerStarted","Data":"4566dc4982ffb0c6d0f01dd395bc151d44dc3c3febea0c1ccdd43cd99432bb24"} Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.537570 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea","Type":"ContainerStarted","Data":"08f5c778ea7c2d1ae2aabe56a34125e55475eb6f14a9048ed95a9edb1ba8fb75"} Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.537614 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.540800 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fh5rm" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.540834 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fh5rm" event={"ID":"44883e34-65b9-4eef-a5ae-75d88804c94f","Type":"ContainerDied","Data":"ff1c5f11ec6b079d9d918cd2db7e09697ea21622718723ecb3e43254ca64c600"} Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.540871 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff1c5f11ec6b079d9d918cd2db7e09697ea21622718723ecb3e43254ca64c600" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.565652 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.154370503 podStartE2EDuration="2.565631739s" podCreationTimestamp="2025-10-01 14:04:57 +0000 UTC" firstStartedPulling="2025-10-01 14:04:58.672886569 +0000 UTC m=+1221.416862777" lastFinishedPulling="2025-10-01 14:04:59.084147805 +0000 UTC m=+1221.828124013" observedRunningTime="2025-10-01 14:04:59.558452407 +0000 UTC m=+1222.302428615" watchObservedRunningTime="2025-10-01 14:04:59.565631739 +0000 UTC m=+1222.309607937" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.620227 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.620505 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-central-agent" containerID="cri-o://a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" gracePeriod=30 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.620579 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="proxy-httpd" containerID="cri-o://b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" gracePeriod=30 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.620625 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="sg-core" containerID="cri-o://22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" gracePeriod=30 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.620642 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-notification-agent" containerID="cri-o://e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" gracePeriod=30 Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.942415 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" path="/var/lib/kubelet/pods/ed331d86-1f29-4a49-a436-2e7f9c55a2d0/volumes" Oct 01 14:04:59 crc kubenswrapper[4605]: I1001 14:04:59.969902 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.148113 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-scripts\") pod \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.148340 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-combined-ca-bundle\") pod \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.148399 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w92dr\" (UniqueName: \"kubernetes.io/projected/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-kube-api-access-w92dr\") pod \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.148473 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-config-data\") pod \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\" (UID: \"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.156023 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-kube-api-access-w92dr" (OuterVolumeSpecName: "kube-api-access-w92dr") pod "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" (UID: "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2"). InnerVolumeSpecName "kube-api-access-w92dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.161240 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-scripts" (OuterVolumeSpecName: "scripts") pod "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" (UID: "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.183593 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" (UID: "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.204941 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-config-data" (OuterVolumeSpecName: "config-data") pod "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" (UID: "6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.250041 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.250077 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w92dr\" (UniqueName: \"kubernetes.io/projected/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-kube-api-access-w92dr\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.250102 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.250110 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.484667 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556392 4605 generic.go:334] "Generic (PLEG): container finished" podID="26405c84-df24-41dc-ae5f-e3146954b888" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" exitCode=0 Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556422 4605 generic.go:334] "Generic (PLEG): container finished" podID="26405c84-df24-41dc-ae5f-e3146954b888" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" exitCode=2 Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556430 4605 generic.go:334] "Generic (PLEG): container finished" podID="26405c84-df24-41dc-ae5f-e3146954b888" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" exitCode=0 Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556437 4605 generic.go:334] "Generic (PLEG): container finished" podID="26405c84-df24-41dc-ae5f-e3146954b888" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" exitCode=0 Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556475 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerDied","Data":"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e"} Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556500 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerDied","Data":"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6"} Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556511 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerDied","Data":"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7"} Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556520 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerDied","Data":"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65"} Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 
14:05:00.556528 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"26405c84-df24-41dc-ae5f-e3146954b888","Type":"ContainerDied","Data":"246c665414a18d9388a87a5f08f31c04b896b623d5a85be8080f46b07b1235de"} Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556542 4605 scope.go:117] "RemoveContainer" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.556652 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.567960 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-78hpx" event={"ID":"6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2","Type":"ContainerDied","Data":"3cba051c2f61caaaf0319888838db4ece69aa2a290a5dcad88926a16372ed992"} Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.568005 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cba051c2f61caaaf0319888838db4ece69aa2a290a5dcad88926a16372ed992" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.568071 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" containerName="nova-scheduler-scheduler" containerID="cri-o://4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046" gracePeriod=30 Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.568210 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-78hpx" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.590153 4605 scope.go:117] "RemoveContainer" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623034 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623403 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-central-agent" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623415 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-central-agent" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623426 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="proxy-httpd" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623432 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="proxy-httpd" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623449 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="sg-core" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623455 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="sg-core" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623469 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" containerName="nova-cell1-conductor-db-sync" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623474 4605 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" containerName="nova-cell1-conductor-db-sync" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623487 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44883e34-65b9-4eef-a5ae-75d88804c94f" containerName="nova-manage" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623493 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="44883e34-65b9-4eef-a5ae-75d88804c94f" containerName="nova-manage" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623510 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-notification-agent" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623516 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-notification-agent" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623522 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerName="init" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623528 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerName="init" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.623535 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerName="dnsmasq-dns" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623541 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerName="dnsmasq-dns" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623737 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-central-agent" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623753 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="sg-core" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623763 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="ceilometer-notification-agent" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623774 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="44883e34-65b9-4eef-a5ae-75d88804c94f" containerName="nova-manage" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623784 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed331d86-1f29-4a49-a436-2e7f9c55a2d0" containerName="dnsmasq-dns" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623797 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" containerName="nova-cell1-conductor-db-sync" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.623807 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="26405c84-df24-41dc-ae5f-e3146954b888" containerName="proxy-httpd" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.624402 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.624423 4605 scope.go:117] "RemoveContainer" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.632643 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.634025 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662446 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-config-data\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662551 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-run-httpd\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662616 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-combined-ca-bundle\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662637 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-sg-core-conf-yaml\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662670 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-scripts\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662711 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-log-httpd\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662744 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6ks5\" (UniqueName: \"kubernetes.io/projected/26405c84-df24-41dc-ae5f-e3146954b888-kube-api-access-p6ks5\") pod \"26405c84-df24-41dc-ae5f-e3146954b888\" (UID: \"26405c84-df24-41dc-ae5f-e3146954b888\") " Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.662994 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.663186 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.663264 4605 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.663275 4605 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/26405c84-df24-41dc-ae5f-e3146954b888-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.671244 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26405c84-df24-41dc-ae5f-e3146954b888-kube-api-access-p6ks5" (OuterVolumeSpecName: "kube-api-access-p6ks5") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "kube-api-access-p6ks5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.671505 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-scripts" (OuterVolumeSpecName: "scripts") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.674590 4605 scope.go:117] "RemoveContainer" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.706855 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.716369 4605 scope.go:117] "RemoveContainer" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.719467 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": container with ID starting with b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e not found: ID does not exist" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.719500 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e"} err="failed to get container status \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": rpc error: code = NotFound desc = could not find container \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": container with ID starting with b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.719525 4605 scope.go:117] "RemoveContainer" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.719722 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": container with ID starting with 22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6 not found: ID does not exist" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.719745 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6"} err="failed to get container status \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": rpc error: code = NotFound desc = could not find container \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": container with ID starting with 22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.719760 4605 scope.go:117] "RemoveContainer" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.719956 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": container with ID starting with e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7 not found: ID does not exist" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.719975 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7"} err="failed to get container status \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": rpc error: code = NotFound desc = could not 
find container \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": container with ID starting with e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.719988 4605 scope.go:117] "RemoveContainer" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" Oct 01 14:05:00 crc kubenswrapper[4605]: E1001 14:05:00.720188 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": container with ID starting with a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65 not found: ID does not exist" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.720204 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65"} err="failed to get container status \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": rpc error: code = NotFound desc = could not find container \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": container with ID starting with a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.720218 4605 scope.go:117] "RemoveContainer" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.724470 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e"} err="failed to get container status \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": rpc error: code = NotFound desc = could not find container \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": container with ID starting with b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.724498 4605 scope.go:117] "RemoveContainer" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.728504 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6"} err="failed to get container status \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": rpc error: code = NotFound desc = could not find container \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": container with ID starting with 22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.728522 4605 scope.go:117] "RemoveContainer" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.728825 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7"} err="failed to get container status \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": rpc error: code = NotFound desc = could not 
find container \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": container with ID starting with e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.728843 4605 scope.go:117] "RemoveContainer" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.729056 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65"} err="failed to get container status \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": rpc error: code = NotFound desc = could not find container \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": container with ID starting with a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.729079 4605 scope.go:117] "RemoveContainer" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.730342 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e"} err="failed to get container status \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": rpc error: code = NotFound desc = could not find container \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": container with ID starting with b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.730361 4605 scope.go:117] "RemoveContainer" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.731632 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6"} err="failed to get container status \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": rpc error: code = NotFound desc = could not find container \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": container with ID starting with 22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.731648 4605 scope.go:117] "RemoveContainer" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.738483 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7"} err="failed to get container status \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": rpc error: code = NotFound desc = could not find container \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": container with ID starting with e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.738513 4605 scope.go:117] "RemoveContainer" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.739200 4605 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65"} err="failed to get container status \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": rpc error: code = NotFound desc = could not find container \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": container with ID starting with a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.739219 4605 scope.go:117] "RemoveContainer" containerID="b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.739400 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e"} err="failed to get container status \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": rpc error: code = NotFound desc = could not find container \"b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e\": container with ID starting with b279f076245d6570fa80bd2c0f70b8640b7878ca5c4cebf4ee7812920d7ec18e not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.739416 4605 scope.go:117] "RemoveContainer" containerID="22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.739603 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6"} err="failed to get container status \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": rpc error: code = NotFound desc = could not find container \"22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6\": container with ID starting with 22522085bbc26921c8cae17525286950ff0268fd2de8065016012505c0c598d6 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.739620 4605 scope.go:117] "RemoveContainer" containerID="e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.740177 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7"} err="failed to get container status \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": rpc error: code = NotFound desc = could not find container \"e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7\": container with ID starting with e8d7e721997d8f771d4064d87b6186ef46f12f26db2f23364532059a888198a7 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.740193 4605 scope.go:117] "RemoveContainer" containerID="a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.743481 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65"} err="failed to get container status \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": rpc error: code = NotFound desc = could not find container \"a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65\": container with ID starting with 
a7f3826cd60b36ab64c07c52b899048372fc73d1b4e15d7f608a380b04f27d65 not found: ID does not exist" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.764543 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.764754 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.764834 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hgd9\" (UniqueName: \"kubernetes.io/projected/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-kube-api-access-5hgd9\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.764934 4605 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.765016 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.765076 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6ks5\" (UniqueName: \"kubernetes.io/projected/26405c84-df24-41dc-ae5f-e3146954b888-kube-api-access-p6ks5\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.771738 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.790266 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-config-data" (OuterVolumeSpecName: "config-data") pod "26405c84-df24-41dc-ae5f-e3146954b888" (UID: "26405c84-df24-41dc-ae5f-e3146954b888"). InnerVolumeSpecName "config-data". 
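
[Annotation] The long run of "DeleteContainer returned error ... NotFound" records above is benign: the kubelet's RemoveContainer bookkeeping keeps re-resolving container IDs that CRI-O has already pruned, and "already gone" is the desired end state of a delete. A tally sketch to quantify that noise:

    import re
    from collections import Counter

    NOT_FOUND = re.compile(r'could not find container \\?"(?P<cid>[0-9a-f]{12,64})')

    def notfound_tally(lines):
        tally = Counter()
        for line in lines:
            for m in NOT_FOUND.finditer(line):
                tally[m.group("cid")[:12]] += 1   # short, docker-style ID
        return tally

Over the records above, each of the four ceilometer container IDs shows up several times: repeated deletes of a target that no longer exists, not an actual failure.
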
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.866041 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hgd9\" (UniqueName: \"kubernetes.io/projected/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-kube-api-access-5hgd9\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.866214 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.866243 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.866284 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.866294 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26405c84-df24-41dc-ae5f-e3146954b888-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.872777 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.872926 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.890800 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hgd9\" (UniqueName: \"kubernetes.io/projected/97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2-kube-api-access-5hgd9\") pod \"nova-cell1-conductor-0\" (UID: \"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2\") " pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:00 crc kubenswrapper[4605]: I1001 14:05:00.943069 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.115136 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.145156 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.159861 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.162154 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.165570 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.165740 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.167495 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.180315 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277278 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-log-httpd\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277315 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277355 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcl85\" (UniqueName: \"kubernetes.io/projected/91f37707-14d5-4c53-9a0a-86d8249d42cf-kube-api-access-jcl85\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277398 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277451 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-scripts\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277539 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-config-data\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " 
pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277593 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.277627 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-run-httpd\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.379566 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-scripts\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.379955 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-config-data\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380052 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380174 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-run-httpd\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380335 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-log-httpd\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380444 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380903 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcl85\" (UniqueName: \"kubernetes.io/projected/91f37707-14d5-4c53-9a0a-86d8249d42cf-kube-api-access-jcl85\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.381055 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380634 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-log-httpd\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.380593 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-run-httpd\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.385238 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-config-data\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.385243 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-scripts\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.385635 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.388673 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.389119 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.397889 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcl85\" (UniqueName: \"kubernetes.io/projected/91f37707-14d5-4c53-9a0a-86d8249d42cf-kube-api-access-jcl85\") pod \"ceilometer-0\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.488980 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.489427 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:01 crc kubenswrapper[4605]: E1001 14:05:01.531685 4605 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 14:05:01 crc kubenswrapper[4605]: E1001 14:05:01.533585 4605 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 14:05:01 crc kubenswrapper[4605]: E1001 14:05:01.534510 4605 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 14:05:01 crc kubenswrapper[4605]: E1001 14:05:01.534612 4605 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" containerName="nova-scheduler-scheduler" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.585605 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2","Type":"ContainerStarted","Data":"b6c623994894b4f566121925b8ea96ffe08fcb64cdf7f79b09a90cb8b0f89992"} Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.943049 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26405c84-df24-41dc-ae5f-e3146954b888" path="/var/lib/kubelet/pods/26405c84-df24-41dc-ae5f-e3146954b888/volumes" Oct 01 14:05:01 crc kubenswrapper[4605]: I1001 14:05:01.946155 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:01 crc kubenswrapper[4605]: W1001 14:05:01.946821 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91f37707_14d5_4c53_9a0a_86d8249d42cf.slice/crio-c918fb4b172821262637a81f4d4b4617b72a37b8bf0978fb4face9bae710f424 WatchSource:0}: Error finding container c918fb4b172821262637a81f4d4b4617b72a37b8bf0978fb4face9bae710f424: Status 404 returned error can't find the container with id c918fb4b172821262637a81f4d4b4617b72a37b8bf0978fb4face9bae710f424 Oct 01 14:05:02 crc kubenswrapper[4605]: I1001 14:05:02.596748 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerStarted","Data":"c918fb4b172821262637a81f4d4b4617b72a37b8bf0978fb4face9bae710f424"} Oct 01 14:05:02 crc kubenswrapper[4605]: I1001 14:05:02.599204 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2","Type":"ContainerStarted","Data":"80661446f93750ed4497f5bd798bd80d93272c243fc59d2b7a7cf991665d1806"} Oct 01 14:05:02 crc kubenswrapper[4605]: I1001 14:05:02.599523 4605 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:02 crc kubenswrapper[4605]: I1001 14:05:02.615153 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.6150680619999997 podStartE2EDuration="2.615068062s" podCreationTimestamp="2025-10-01 14:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:02.613531393 +0000 UTC m=+1225.357507611" watchObservedRunningTime="2025-10-01 14:05:02.615068062 +0000 UTC m=+1225.359044290" Oct 01 14:05:03 crc kubenswrapper[4605]: I1001 14:05:03.609356 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerStarted","Data":"605cbd1ab37eeb3b0c8b476382eb418eb1fce8f12a23399a2451c10c36397891"} Oct 01 14:05:03 crc kubenswrapper[4605]: I1001 14:05:03.609721 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerStarted","Data":"d0c4561f6b101474ce082218a2af0c419cdd40f340e43c26ce7a8beacbfbc15c"} Oct 01 14:05:04 crc kubenswrapper[4605]: I1001 14:05:04.622809 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerStarted","Data":"6ca09ee632ed58998ef472386027b7e242212810aa3dae0a0f13278563b4a6b6"} Oct 01 14:05:04 crc kubenswrapper[4605]: I1001 14:05:04.625839 4605 generic.go:334] "Generic (PLEG): container finished" podID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" containerID="4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046" exitCode=0 Oct 01 14:05:04 crc kubenswrapper[4605]: I1001 14:05:04.625893 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0","Type":"ContainerDied","Data":"4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046"} Oct 01 14:05:04 crc kubenswrapper[4605]: I1001 14:05:04.857795 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.041545 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-combined-ca-bundle\") pod \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.041879 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp94w\" (UniqueName: \"kubernetes.io/projected/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-kube-api-access-tp94w\") pod \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.041964 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data\") pod \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.049012 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-kube-api-access-tp94w" (OuterVolumeSpecName: "kube-api-access-tp94w") pod "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" (UID: "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0"). InnerVolumeSpecName "kube-api-access-tp94w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.103003 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" (UID: "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.144319 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data" (OuterVolumeSpecName: "config-data") pod "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" (UID: "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.145477 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data\") pod \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\" (UID: \"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0\") " Oct 01 14:05:05 crc kubenswrapper[4605]: W1001 14:05:05.145597 4605 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0/volumes/kubernetes.io~secret/config-data Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.145612 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data" (OuterVolumeSpecName: "config-data") pod "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" (UID: "f5ff5fdc-351d-40e4-9ecb-5bf4173987a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.146195 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.146269 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp94w\" (UniqueName: \"kubernetes.io/projected/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-kube-api-access-tp94w\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.146331 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.311243 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.452606 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0423df9d-25a8-45f3-9a48-f8f01d526f37-logs\") pod \"0423df9d-25a8-45f3-9a48-f8f01d526f37\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.453309 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0423df9d-25a8-45f3-9a48-f8f01d526f37-logs" (OuterVolumeSpecName: "logs") pod "0423df9d-25a8-45f3-9a48-f8f01d526f37" (UID: "0423df9d-25a8-45f3-9a48-f8f01d526f37"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.452789 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-combined-ca-bundle\") pod \"0423df9d-25a8-45f3-9a48-f8f01d526f37\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.453551 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl949\" (UniqueName: \"kubernetes.io/projected/0423df9d-25a8-45f3-9a48-f8f01d526f37-kube-api-access-jl949\") pod \"0423df9d-25a8-45f3-9a48-f8f01d526f37\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.453670 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-config-data\") pod \"0423df9d-25a8-45f3-9a48-f8f01d526f37\" (UID: \"0423df9d-25a8-45f3-9a48-f8f01d526f37\") " Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.454176 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0423df9d-25a8-45f3-9a48-f8f01d526f37-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.476054 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0423df9d-25a8-45f3-9a48-f8f01d526f37-kube-api-access-jl949" (OuterVolumeSpecName: "kube-api-access-jl949") pod "0423df9d-25a8-45f3-9a48-f8f01d526f37" (UID: "0423df9d-25a8-45f3-9a48-f8f01d526f37"). InnerVolumeSpecName "kube-api-access-jl949". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.490695 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0423df9d-25a8-45f3-9a48-f8f01d526f37" (UID: "0423df9d-25a8-45f3-9a48-f8f01d526f37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.491779 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-config-data" (OuterVolumeSpecName: "config-data") pod "0423df9d-25a8-45f3-9a48-f8f01d526f37" (UID: "0423df9d-25a8-45f3-9a48-f8f01d526f37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.556295 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.556328 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0423df9d-25a8-45f3-9a48-f8f01d526f37-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.556340 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl949\" (UniqueName: \"kubernetes.io/projected/0423df9d-25a8-45f3-9a48-f8f01d526f37-kube-api-access-jl949\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.639329 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerStarted","Data":"c78d9bfa7d34ba0bc5ef17adc381e76099de52cd66a4b4d4adb8eb12967926ae"} Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.639524 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.641638 4605 generic.go:334] "Generic (PLEG): container finished" podID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerID="57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31" exitCode=0 Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.641706 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0423df9d-25a8-45f3-9a48-f8f01d526f37","Type":"ContainerDied","Data":"57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31"} Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.641737 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0423df9d-25a8-45f3-9a48-f8f01d526f37","Type":"ContainerDied","Data":"1115ed16bdff5d010b3322aebf07f6c887d985fa8fea9c2d0d1fae7229a4b6a3"} Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.641785 4605 scope.go:117] "RemoveContainer" containerID="57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.641724 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.644295 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f5ff5fdc-351d-40e4-9ecb-5bf4173987a0","Type":"ContainerDied","Data":"46358cd9b87c25b4deb2e2ad16aa5d63b5dfc3f5db01dc4181a42b844de08321"} Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.644370 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.680536 4605 scope.go:117] "RemoveContainer" containerID="f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.749461 4605 scope.go:117] "RemoveContainer" containerID="57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31" Oct 01 14:05:05 crc kubenswrapper[4605]: E1001 14:05:05.757313 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31\": container with ID starting with 57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31 not found: ID does not exist" containerID="57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.757353 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31"} err="failed to get container status \"57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31\": rpc error: code = NotFound desc = could not find container \"57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31\": container with ID starting with 57c6936a6e4b8eded978cef75d00b2811593015696181d14b906313bf23ece31 not found: ID does not exist" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.757379 4605 scope.go:117] "RemoveContainer" containerID="f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98" Oct 01 14:05:05 crc kubenswrapper[4605]: E1001 14:05:05.760408 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98\": container with ID starting with f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98 not found: ID does not exist" containerID="f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.760457 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98"} err="failed to get container status \"f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98\": rpc error: code = NotFound desc = could not find container \"f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98\": container with ID starting with f09336d890304474dcd55d9f8ca93991cd0c6c655c452a3e0a5f4d7ec1a7af98 not found: ID does not exist" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.760488 4605 scope.go:117] "RemoveContainer" containerID="4c0d4b188b494cae5d9bc2598a1d30e0bb304bc09406ed468dfbfb920b3a7046" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.771627 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" 
podStartSLOduration=1.623786693 podStartE2EDuration="4.771600914s" podCreationTimestamp="2025-10-01 14:05:01 +0000 UTC" firstStartedPulling="2025-10-01 14:05:01.949837779 +0000 UTC m=+1224.693813987" lastFinishedPulling="2025-10-01 14:05:05.09765201 +0000 UTC m=+1227.841628208" observedRunningTime="2025-10-01 14:05:05.700403873 +0000 UTC m=+1228.444380081" watchObservedRunningTime="2025-10-01 14:05:05.771600914 +0000 UTC m=+1228.515577122" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.775121 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.781742 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.819894 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841221 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:05 crc kubenswrapper[4605]: E1001 14:05:05.841651 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-log" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841669 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-log" Oct 01 14:05:05 crc kubenswrapper[4605]: E1001 14:05:05.841686 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-api" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841692 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-api" Oct 01 14:05:05 crc kubenswrapper[4605]: E1001 14:05:05.841729 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" containerName="nova-scheduler-scheduler" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841738 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" containerName="nova-scheduler-scheduler" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841923 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" containerName="nova-scheduler-scheduler" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841943 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-api" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.841959 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" containerName="nova-api-log" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.842910 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.850723 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.862698 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.896209 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.897984 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.902768 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 14:05:05 crc kubenswrapper[4605]: I1001 14:05:05.902945 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.005859 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59gp9\" (UniqueName: \"kubernetes.io/projected/704f2039-49de-4306-a09a-9e726133ee40-kube-api-access-59gp9\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.005958 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-config-data\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.006012 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.006184 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/704f2039-49de-4306-a09a-9e726133ee40-logs\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.029556 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0423df9d-25a8-45f3-9a48-f8f01d526f37" path="/var/lib/kubelet/pods/0423df9d-25a8-45f3-9a48-f8f01d526f37/volumes" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.033775 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5ff5fdc-351d-40e4-9ecb-5bf4173987a0" path="/var/lib/kubelet/pods/f5ff5fdc-351d-40e4-9ecb-5bf4173987a0/volumes" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.051505 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.107400 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/704f2039-49de-4306-a09a-9e726133ee40-logs\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.107922 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/704f2039-49de-4306-a09a-9e726133ee40-logs\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.108022 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nclzg\" (UniqueName: \"kubernetes.io/projected/528d903b-d9c0-442c-b3c3-8b576405a91b-kube-api-access-nclzg\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.108129 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.108213 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59gp9\" (UniqueName: \"kubernetes.io/projected/704f2039-49de-4306-a09a-9e726133ee40-kube-api-access-59gp9\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.108475 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-config-data\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.108534 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-config-data\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.108578 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.113348 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-config-data\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.114639 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.139961 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59gp9\" (UniqueName: \"kubernetes.io/projected/704f2039-49de-4306-a09a-9e726133ee40-kube-api-access-59gp9\") pod \"nova-api-0\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " pod="openstack/nova-api-0" Oct 01 14:05:06 crc 
kubenswrapper[4605]: I1001 14:05:06.210227 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-config-data\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.210357 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nclzg\" (UniqueName: \"kubernetes.io/projected/528d903b-d9c0-442c-b3c3-8b576405a91b-kube-api-access-nclzg\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.210380 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.213752 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-config-data\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.213980 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.238547 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nclzg\" (UniqueName: \"kubernetes.io/projected/528d903b-d9c0-442c-b3c3-8b576405a91b-kube-api-access-nclzg\") pod \"nova-scheduler-0\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.267594 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.307628 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.739703 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:06 crc kubenswrapper[4605]: W1001 14:05:06.744885 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod704f2039_49de_4306_a09a_9e726133ee40.slice/crio-44baeabb276acb2454b1e1b39fb1203518782c012fd155af8a0beac5c987f78e WatchSource:0}: Error finding container 44baeabb276acb2454b1e1b39fb1203518782c012fd155af8a0beac5c987f78e: Status 404 returned error can't find the container with id 44baeabb276acb2454b1e1b39fb1203518782c012fd155af8a0beac5c987f78e Oct 01 14:05:06 crc kubenswrapper[4605]: I1001 14:05:06.837840 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:06 crc kubenswrapper[4605]: W1001 14:05:06.839404 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod528d903b_d9c0_442c_b3c3_8b576405a91b.slice/crio-5da4b6aabac6f5d537cbbc9852e2dd69709ebbb5b4631520ab7525a800f083a2 WatchSource:0}: Error finding container 5da4b6aabac6f5d537cbbc9852e2dd69709ebbb5b4631520ab7525a800f083a2: Status 404 returned error can't find the container with id 5da4b6aabac6f5d537cbbc9852e2dd69709ebbb5b4631520ab7525a800f083a2 Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 14:05:07.673903 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"528d903b-d9c0-442c-b3c3-8b576405a91b","Type":"ContainerStarted","Data":"a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5"} Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 14:05:07.674321 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"528d903b-d9c0-442c-b3c3-8b576405a91b","Type":"ContainerStarted","Data":"5da4b6aabac6f5d537cbbc9852e2dd69709ebbb5b4631520ab7525a800f083a2"} Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 14:05:07.676851 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"704f2039-49de-4306-a09a-9e726133ee40","Type":"ContainerStarted","Data":"a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b"} Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 14:05:07.676898 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"704f2039-49de-4306-a09a-9e726133ee40","Type":"ContainerStarted","Data":"d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640"} Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 14:05:07.676910 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"704f2039-49de-4306-a09a-9e726133ee40","Type":"ContainerStarted","Data":"44baeabb276acb2454b1e1b39fb1203518782c012fd155af8a0beac5c987f78e"} Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 14:05:07.693691 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.6936747260000002 podStartE2EDuration="2.693674726s" podCreationTimestamp="2025-10-01 14:05:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:07.687678845 +0000 UTC m=+1230.431655043" watchObservedRunningTime="2025-10-01 14:05:07.693674726 +0000 UTC m=+1230.437650934" Oct 01 14:05:07 crc kubenswrapper[4605]: I1001 
14:05:07.720583 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.720565686 podStartE2EDuration="2.720565686s" podCreationTimestamp="2025-10-01 14:05:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:07.716314748 +0000 UTC m=+1230.460290966" watchObservedRunningTime="2025-10-01 14:05:07.720565686 +0000 UTC m=+1230.464541894" Oct 01 14:05:08 crc kubenswrapper[4605]: I1001 14:05:08.092952 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 01 14:05:10 crc kubenswrapper[4605]: I1001 14:05:10.972490 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 01 14:05:11 crc kubenswrapper[4605]: I1001 14:05:11.307807 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 14:05:16 crc kubenswrapper[4605]: I1001 14:05:16.268158 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:05:16 crc kubenswrapper[4605]: I1001 14:05:16.268471 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:05:16 crc kubenswrapper[4605]: I1001 14:05:16.307895 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 14:05:16 crc kubenswrapper[4605]: I1001 14:05:16.343489 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 14:05:16 crc kubenswrapper[4605]: I1001 14:05:16.791544 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 14:05:17 crc kubenswrapper[4605]: I1001 14:05:17.353353 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:05:17 crc kubenswrapper[4605]: I1001 14:05:17.353567 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.816763 4605 generic.go:334] "Generic (PLEG): container finished" podID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerID="e63a9c9fa0be55ac8f9bf99febd858d93a221af5fe4caed4032d9a8fb8c6984f" exitCode=137 Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.816801 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c","Type":"ContainerDied","Data":"e63a9c9fa0be55ac8f9bf99febd858d93a221af5fe4caed4032d9a8fb8c6984f"} Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.817141 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c","Type":"ContainerDied","Data":"6a31bba209da85f1d37f251b9b9d3e3893c820a479cb655b193f06cea9885b1f"} Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.817160 4605 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="6a31bba209da85f1d37f251b9b9d3e3893c820a479cb655b193f06cea9885b1f" Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.819113 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"272aa35e-0ea5-43bc-996a-d3f010dc94ac","Type":"ContainerDied","Data":"4cc13cd50b58ff7ceb886e971b88a8db673aba466c79ec59bdeeeaa9e14afcf0"} Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.819130 4605 generic.go:334] "Generic (PLEG): container finished" podID="272aa35e-0ea5-43bc-996a-d3f010dc94ac" containerID="4cc13cd50b58ff7ceb886e971b88a8db673aba466c79ec59bdeeeaa9e14afcf0" exitCode=137 Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.819156 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"272aa35e-0ea5-43bc-996a-d3f010dc94ac","Type":"ContainerDied","Data":"1dc90f68148ccdc97e44c85deab406bbc81f8c5636dc5aceabe7f6174ae53149"} Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.819170 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dc90f68148ccdc97e44c85deab406bbc81f8c5636dc5aceabe7f6174ae53149" Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.850188 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:22 crc kubenswrapper[4605]: I1001 14:05:22.855866 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.034795 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-combined-ca-bundle\") pod \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.034914 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-logs\") pod \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.034977 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhn2f\" (UniqueName: \"kubernetes.io/projected/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-kube-api-access-nhn2f\") pod \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.035009 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-config-data\") pod \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.035051 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-combined-ca-bundle\") pod \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.035152 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xj9w\" (UniqueName: 
\"kubernetes.io/projected/272aa35e-0ea5-43bc-996a-d3f010dc94ac-kube-api-access-5xj9w\") pod \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\" (UID: \"272aa35e-0ea5-43bc-996a-d3f010dc94ac\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.035274 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-config-data\") pod \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\" (UID: \"699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c\") " Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.035905 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-logs" (OuterVolumeSpecName: "logs") pod "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" (UID: "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.036351 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.054992 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/272aa35e-0ea5-43bc-996a-d3f010dc94ac-kube-api-access-5xj9w" (OuterVolumeSpecName: "kube-api-access-5xj9w") pod "272aa35e-0ea5-43bc-996a-d3f010dc94ac" (UID: "272aa35e-0ea5-43bc-996a-d3f010dc94ac"). InnerVolumeSpecName "kube-api-access-5xj9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.055056 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-kube-api-access-nhn2f" (OuterVolumeSpecName: "kube-api-access-nhn2f") pod "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" (UID: "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c"). InnerVolumeSpecName "kube-api-access-nhn2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.065062 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "272aa35e-0ea5-43bc-996a-d3f010dc94ac" (UID: "272aa35e-0ea5-43bc-996a-d3f010dc94ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.065870 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-config-data" (OuterVolumeSpecName: "config-data") pod "272aa35e-0ea5-43bc-996a-d3f010dc94ac" (UID: "272aa35e-0ea5-43bc-996a-d3f010dc94ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.068464 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" (UID: "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.074825 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-config-data" (OuterVolumeSpecName: "config-data") pod "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" (UID: "699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.138377 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhn2f\" (UniqueName: \"kubernetes.io/projected/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-kube-api-access-nhn2f\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.138413 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.138423 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.138432 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xj9w\" (UniqueName: \"kubernetes.io/projected/272aa35e-0ea5-43bc-996a-d3f010dc94ac-kube-api-access-5xj9w\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.138440 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.138449 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272aa35e-0ea5-43bc-996a-d3f010dc94ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.827937 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.827998 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.878653 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.896822 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.907183 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.921218 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.923563 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: E1001 14:05:23.923985 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-metadata" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.923996 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-metadata" Oct 01 14:05:23 crc kubenswrapper[4605]: E1001 14:05:23.924022 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-log" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.924028 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-log" Oct 01 14:05:23 crc kubenswrapper[4605]: E1001 14:05:23.924050 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="272aa35e-0ea5-43bc-996a-d3f010dc94ac" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.924058 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="272aa35e-0ea5-43bc-996a-d3f010dc94ac" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.924296 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-log" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.924311 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" containerName="nova-metadata-metadata" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.924323 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="272aa35e-0ea5-43bc-996a-d3f010dc94ac" containerName="nova-cell1-novncproxy-novncproxy" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.925413 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.929266 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.929473 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.959013 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="272aa35e-0ea5-43bc-996a-d3f010dc94ac" path="/var/lib/kubelet/pods/272aa35e-0ea5-43bc-996a-d3f010dc94ac/volumes" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.960184 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c" path="/var/lib/kubelet/pods/699a0e0b-af6b-4f0e-a7ae-7312d9a0b10c/volumes" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.961010 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.961056 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.966705 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.969753 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.971957 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.972772 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:05:23 crc kubenswrapper[4605]: I1001 14:05:23.974129 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.091945 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092031 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092050 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092076 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092125 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt8nv\" (UniqueName: \"kubernetes.io/projected/4ed8f49c-0b65-433b-b5a9-848e4e920b86-kube-api-access-qt8nv\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092142 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-config-data\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092181 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092203 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092284 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpmb9\" (UniqueName: \"kubernetes.io/projected/02fa0bb4-5a25-43da-8f92-a8c0ca715032-kube-api-access-bpmb9\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.092303 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ed8f49c-0b65-433b-b5a9-848e4e920b86-logs\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.193651 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.193759 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.193785 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.193816 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.193857 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt8nv\" (UniqueName: \"kubernetes.io/projected/4ed8f49c-0b65-433b-b5a9-848e4e920b86-kube-api-access-qt8nv\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.194572 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-config-data\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.194615 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.194635 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.194669 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpmb9\" (UniqueName: \"kubernetes.io/projected/02fa0bb4-5a25-43da-8f92-a8c0ca715032-kube-api-access-bpmb9\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.194693 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ed8f49c-0b65-433b-b5a9-848e4e920b86-logs\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.195078 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ed8f49c-0b65-433b-b5a9-848e4e920b86-logs\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.197969 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-config-data\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.198632 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.198781 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.199248 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.199699 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.209060 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.210360 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02fa0bb4-5a25-43da-8f92-a8c0ca715032-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.213638 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpmb9\" (UniqueName: \"kubernetes.io/projected/02fa0bb4-5a25-43da-8f92-a8c0ca715032-kube-api-access-bpmb9\") pod \"nova-cell1-novncproxy-0\" (UID: \"02fa0bb4-5a25-43da-8f92-a8c0ca715032\") " pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.216963 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt8nv\" (UniqueName: \"kubernetes.io/projected/4ed8f49c-0b65-433b-b5a9-848e4e920b86-kube-api-access-qt8nv\") pod \"nova-metadata-0\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.262613 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.305478 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.732713 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:24 crc kubenswrapper[4605]: W1001 14:05:24.737132 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ed8f49c_0b65_433b_b5a9_848e4e920b86.slice/crio-fe18d7499b8ec28a1b3d62220f6ff473ec2d955ec1062e4032d5d501c576e3d7 WatchSource:0}: Error finding container fe18d7499b8ec28a1b3d62220f6ff473ec2d955ec1062e4032d5d501c576e3d7: Status 404 returned error can't find the container with id fe18d7499b8ec28a1b3d62220f6ff473ec2d955ec1062e4032d5d501c576e3d7 Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.823861 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 01 14:05:24 crc kubenswrapper[4605]: W1001 14:05:24.828198 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02fa0bb4_5a25_43da_8f92_a8c0ca715032.slice/crio-47a7de81581cae1cee976bc3a5c20013ca0c175e68082bcd6bf0371b1e393557 WatchSource:0}: Error finding container 47a7de81581cae1cee976bc3a5c20013ca0c175e68082bcd6bf0371b1e393557: Status 404 returned error can't find the container with id 47a7de81581cae1cee976bc3a5c20013ca0c175e68082bcd6bf0371b1e393557 Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.840306 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"02fa0bb4-5a25-43da-8f92-a8c0ca715032","Type":"ContainerStarted","Data":"47a7de81581cae1cee976bc3a5c20013ca0c175e68082bcd6bf0371b1e393557"} Oct 01 14:05:24 crc kubenswrapper[4605]: I1001 14:05:24.841796 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ed8f49c-0b65-433b-b5a9-848e4e920b86","Type":"ContainerStarted","Data":"fe18d7499b8ec28a1b3d62220f6ff473ec2d955ec1062e4032d5d501c576e3d7"} Oct 01 14:05:25 crc kubenswrapper[4605]: I1001 14:05:25.856073 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ed8f49c-0b65-433b-b5a9-848e4e920b86","Type":"ContainerStarted","Data":"a149e587cbddcef7a8848568bb4dfd43e0b6ad7f2522459bf8b23d76e2bfb58a"} Oct 01 14:05:25 crc kubenswrapper[4605]: I1001 14:05:25.857584 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ed8f49c-0b65-433b-b5a9-848e4e920b86","Type":"ContainerStarted","Data":"78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7"} Oct 01 14:05:25 crc kubenswrapper[4605]: I1001 14:05:25.861793 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"02fa0bb4-5a25-43da-8f92-a8c0ca715032","Type":"ContainerStarted","Data":"31bb39aad7dc6a6ec6b49b3809bdec573599db984836f85f2af6c5c407b71c0f"} Oct 01 14:05:25 crc kubenswrapper[4605]: I1001 14:05:25.919794 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.919776324 podStartE2EDuration="2.919776324s" podCreationTimestamp="2025-10-01 14:05:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:25.880620965 +0000 UTC m=+1248.624597173" watchObservedRunningTime="2025-10-01 14:05:25.919776324 +0000 UTC m=+1248.663752532" Oct 01 
14:05:25 crc kubenswrapper[4605]: I1001 14:05:25.920573 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.920564814 podStartE2EDuration="2.920564814s" podCreationTimestamp="2025-10-01 14:05:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:25.917570298 +0000 UTC m=+1248.661546496" watchObservedRunningTime="2025-10-01 14:05:25.920564814 +0000 UTC m=+1248.664541022" Oct 01 14:05:26 crc kubenswrapper[4605]: I1001 14:05:26.272141 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 14:05:26 crc kubenswrapper[4605]: I1001 14:05:26.272526 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 14:05:26 crc kubenswrapper[4605]: I1001 14:05:26.275818 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 14:05:26 crc kubenswrapper[4605]: I1001 14:05:26.291057 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 14:05:26 crc kubenswrapper[4605]: I1001 14:05:26.872896 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 14:05:26 crc kubenswrapper[4605]: I1001 14:05:26.877867 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.101401 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-nb6z5"] Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.108268 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.164019 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.164116 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.164149 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.164183 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-config\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.164272 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.164289 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn7vk\" (UniqueName: \"kubernetes.io/projected/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-kube-api-access-tn7vk\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.180216 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-nb6z5"] Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.265252 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.265315 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.265347 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-config\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.265400 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.265421 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn7vk\" (UniqueName: \"kubernetes.io/projected/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-kube-api-access-tn7vk\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.265469 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.266330 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.266904 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.267570 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.267696 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.268243 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-config\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.293634 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn7vk\" (UniqueName: 
\"kubernetes.io/projected/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-kube-api-access-tn7vk\") pod \"dnsmasq-dns-59cf4bdb65-nb6z5\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:27 crc kubenswrapper[4605]: I1001 14:05:27.443691 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:28 crc kubenswrapper[4605]: I1001 14:05:28.006020 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-nb6z5"] Oct 01 14:05:28 crc kubenswrapper[4605]: I1001 14:05:28.887443 4605 generic.go:334] "Generic (PLEG): container finished" podID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerID="20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23" exitCode=0 Oct 01 14:05:28 crc kubenswrapper[4605]: I1001 14:05:28.887541 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" event={"ID":"887a809b-9c1a-4f0f-94e5-a2afcb1f914f","Type":"ContainerDied","Data":"20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23"} Oct 01 14:05:28 crc kubenswrapper[4605]: I1001 14:05:28.887952 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" event={"ID":"887a809b-9c1a-4f0f-94e5-a2afcb1f914f","Type":"ContainerStarted","Data":"f28562bf48a3ee0f9a865cd7719a52fcf1600d92fada41e7a4d38c070d8c8b3d"} Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.268234 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.268575 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.305841 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.430217 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.896444 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" event={"ID":"887a809b-9c1a-4f0f-94e5-a2afcb1f914f","Type":"ContainerStarted","Data":"c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd"} Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.896579 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-log" containerID="cri-o://d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640" gracePeriod=30 Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.896649 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-api" containerID="cri-o://a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b" gracePeriod=30 Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.937842 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.938389 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-central-agent" 
containerID="cri-o://d0c4561f6b101474ce082218a2af0c419cdd40f340e43c26ce7a8beacbfbc15c" gracePeriod=30 Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.938498 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-notification-agent" containerID="cri-o://605cbd1ab37eeb3b0c8b476382eb418eb1fce8f12a23399a2451c10c36397891" gracePeriod=30 Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.938475 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="proxy-httpd" containerID="cri-o://c78d9bfa7d34ba0bc5ef17adc381e76099de52cd66a4b4d4adb8eb12967926ae" gracePeriod=30 Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.938480 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="sg-core" containerID="cri-o://6ca09ee632ed58998ef472386027b7e242212810aa3dae0a0f13278563b4a6b6" gracePeriod=30 Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.949570 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" podStartSLOduration=2.949549722 podStartE2EDuration="2.949549722s" podCreationTimestamp="2025-10-01 14:05:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:29.924460378 +0000 UTC m=+1252.668436586" watchObservedRunningTime="2025-10-01 14:05:29.949549722 +0000 UTC m=+1252.693525930" Oct 01 14:05:29 crc kubenswrapper[4605]: I1001 14:05:29.951575 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": EOF" Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.906669 4605 generic.go:334] "Generic (PLEG): container finished" podID="704f2039-49de-4306-a09a-9e726133ee40" containerID="d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640" exitCode=143 Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.906746 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"704f2039-49de-4306-a09a-9e726133ee40","Type":"ContainerDied","Data":"d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640"} Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909153 4605 generic.go:334] "Generic (PLEG): container finished" podID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerID="c78d9bfa7d34ba0bc5ef17adc381e76099de52cd66a4b4d4adb8eb12967926ae" exitCode=0 Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909181 4605 generic.go:334] "Generic (PLEG): container finished" podID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerID="6ca09ee632ed58998ef472386027b7e242212810aa3dae0a0f13278563b4a6b6" exitCode=2 Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909189 4605 generic.go:334] "Generic (PLEG): container finished" podID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerID="d0c4561f6b101474ce082218a2af0c419cdd40f340e43c26ce7a8beacbfbc15c" exitCode=0 Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909226 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerDied","Data":"c78d9bfa7d34ba0bc5ef17adc381e76099de52cd66a4b4d4adb8eb12967926ae"} Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909257 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerDied","Data":"6ca09ee632ed58998ef472386027b7e242212810aa3dae0a0f13278563b4a6b6"} Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909269 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerDied","Data":"d0c4561f6b101474ce082218a2af0c419cdd40f340e43c26ce7a8beacbfbc15c"} Oct 01 14:05:30 crc kubenswrapper[4605]: I1001 14:05:30.909373 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:31 crc kubenswrapper[4605]: I1001 14:05:31.490778 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": dial tcp 10.217.0.195:3000: connect: connection refused" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.475048 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.611710 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59gp9\" (UniqueName: \"kubernetes.io/projected/704f2039-49de-4306-a09a-9e726133ee40-kube-api-access-59gp9\") pod \"704f2039-49de-4306-a09a-9e726133ee40\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.611834 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-combined-ca-bundle\") pod \"704f2039-49de-4306-a09a-9e726133ee40\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.611885 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/704f2039-49de-4306-a09a-9e726133ee40-logs\") pod \"704f2039-49de-4306-a09a-9e726133ee40\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.611931 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-config-data\") pod \"704f2039-49de-4306-a09a-9e726133ee40\" (UID: \"704f2039-49de-4306-a09a-9e726133ee40\") " Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.612331 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/704f2039-49de-4306-a09a-9e726133ee40-logs" (OuterVolumeSpecName: "logs") pod "704f2039-49de-4306-a09a-9e726133ee40" (UID: "704f2039-49de-4306-a09a-9e726133ee40"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.619471 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/704f2039-49de-4306-a09a-9e726133ee40-kube-api-access-59gp9" (OuterVolumeSpecName: "kube-api-access-59gp9") pod "704f2039-49de-4306-a09a-9e726133ee40" (UID: "704f2039-49de-4306-a09a-9e726133ee40"). InnerVolumeSpecName "kube-api-access-59gp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.645125 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "704f2039-49de-4306-a09a-9e726133ee40" (UID: "704f2039-49de-4306-a09a-9e726133ee40"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.659906 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-config-data" (OuterVolumeSpecName: "config-data") pod "704f2039-49de-4306-a09a-9e726133ee40" (UID: "704f2039-49de-4306-a09a-9e726133ee40"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.713731 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.713764 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/704f2039-49de-4306-a09a-9e726133ee40-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.713776 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704f2039-49de-4306-a09a-9e726133ee40-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.713785 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59gp9\" (UniqueName: \"kubernetes.io/projected/704f2039-49de-4306-a09a-9e726133ee40-kube-api-access-59gp9\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.942339 4605 generic.go:334] "Generic (PLEG): container finished" podID="704f2039-49de-4306-a09a-9e726133ee40" containerID="a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b" exitCode=0 Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.942380 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"704f2039-49de-4306-a09a-9e726133ee40","Type":"ContainerDied","Data":"a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b"} Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.942402 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"704f2039-49de-4306-a09a-9e726133ee40","Type":"ContainerDied","Data":"44baeabb276acb2454b1e1b39fb1203518782c012fd155af8a0beac5c987f78e"} Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.942412 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.942448 4605 scope.go:117] "RemoveContainer" containerID="a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b" Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.972047 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:33 crc kubenswrapper[4605]: I1001 14:05:33.991239 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.007448 4605 scope.go:117] "RemoveContainer" containerID="d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.008874 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:34 crc kubenswrapper[4605]: E1001 14:05:34.009275 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-log" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.009290 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-log" Oct 01 14:05:34 crc kubenswrapper[4605]: E1001 14:05:34.009300 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-api" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.009305 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-api" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.009483 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-api" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.009507 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="704f2039-49de-4306-a09a-9e726133ee40" containerName="nova-api-log" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.010440 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.012895 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.013187 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.013397 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.018855 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-config-data\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.018906 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.018927 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-public-tls-certs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.018992 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.019137 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vxl8\" (UniqueName: \"kubernetes.io/projected/65f69992-26f8-4f45-beeb-3aee499bf19e-kube-api-access-6vxl8\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.019167 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f69992-26f8-4f45-beeb-3aee499bf19e-logs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.047026 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.050010 4605 scope.go:117] "RemoveContainer" containerID="a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b" Oct 01 14:05:34 crc kubenswrapper[4605]: E1001 14:05:34.051280 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b\": container with ID starting with a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b not found: ID does not exist" 
containerID="a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.051367 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b"} err="failed to get container status \"a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b\": rpc error: code = NotFound desc = could not find container \"a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b\": container with ID starting with a376c7bb725ce7533bab5b7698ec37ca2dd22eabc8a57e82ee100f9dbfc8a34b not found: ID does not exist" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.051536 4605 scope.go:117] "RemoveContainer" containerID="d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640" Oct 01 14:05:34 crc kubenswrapper[4605]: E1001 14:05:34.051992 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640\": container with ID starting with d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640 not found: ID does not exist" containerID="d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.052031 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640"} err="failed to get container status \"d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640\": rpc error: code = NotFound desc = could not find container \"d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640\": container with ID starting with d34cf31d6cca43611bb76b1b906854a828a81538116a8410d9cec60fd5801640 not found: ID does not exist" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.120514 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.120543 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-public-tls-certs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.120633 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.120729 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vxl8\" (UniqueName: \"kubernetes.io/projected/65f69992-26f8-4f45-beeb-3aee499bf19e-kube-api-access-6vxl8\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.120748 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/65f69992-26f8-4f45-beeb-3aee499bf19e-logs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.120786 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-config-data\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.122525 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f69992-26f8-4f45-beeb-3aee499bf19e-logs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.124186 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-public-tls-certs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.127981 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-config-data\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.129449 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.137666 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.142647 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vxl8\" (UniqueName: \"kubernetes.io/projected/65f69992-26f8-4f45-beeb-3aee499bf19e-kube-api-access-6vxl8\") pod \"nova-api-0\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.268174 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.268505 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.306189 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.326045 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.350299 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.813797 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.976298 4605 generic.go:334] "Generic (PLEG): container finished" podID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerID="605cbd1ab37eeb3b0c8b476382eb418eb1fce8f12a23399a2451c10c36397891" exitCode=0 Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.976404 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerDied","Data":"605cbd1ab37eeb3b0c8b476382eb418eb1fce8f12a23399a2451c10c36397891"} Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.978924 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65f69992-26f8-4f45-beeb-3aee499bf19e","Type":"ContainerStarted","Data":"52701961898fd0e6255b2f328591822358003359f44524b105ac32904ed1bba8"} Oct 01 14:05:34 crc kubenswrapper[4605]: I1001 14:05:34.994452 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.047338 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.145691 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-log-httpd\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146023 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcl85\" (UniqueName: \"kubernetes.io/projected/91f37707-14d5-4c53-9a0a-86d8249d42cf-kube-api-access-jcl85\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146117 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-config-data\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146194 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146210 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-sg-core-conf-yaml\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146358 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-scripts\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146437 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-ceilometer-tls-certs\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146512 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-run-httpd\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.146539 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-combined-ca-bundle\") pod \"91f37707-14d5-4c53-9a0a-86d8249d42cf\" (UID: \"91f37707-14d5-4c53-9a0a-86d8249d42cf\") " Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.147377 4605 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.147625 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.150693 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91f37707-14d5-4c53-9a0a-86d8249d42cf-kube-api-access-jcl85" (OuterVolumeSpecName: "kube-api-access-jcl85") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "kube-api-access-jcl85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.154370 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-scripts" (OuterVolumeSpecName: "scripts") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.230777 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-g6sp8"] Oct 01 14:05:35 crc kubenswrapper[4605]: E1001 14:05:35.231232 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-central-agent" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231248 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-central-agent" Oct 01 14:05:35 crc kubenswrapper[4605]: E1001 14:05:35.231276 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-notification-agent" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231282 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-notification-agent" Oct 01 14:05:35 crc kubenswrapper[4605]: E1001 14:05:35.231300 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="sg-core" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231306 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="sg-core" Oct 01 14:05:35 crc kubenswrapper[4605]: E1001 14:05:35.231318 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="proxy-httpd" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231323 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="proxy-httpd" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231568 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-central-agent" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231581 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="ceilometer-notification-agent" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231617 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="proxy-httpd" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.231629 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" containerName="sg-core" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.232419 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.234915 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.243994 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.244205 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.249574 4605 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91f37707-14d5-4c53-9a0a-86d8249d42cf-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.249775 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcl85\" (UniqueName: \"kubernetes.io/projected/91f37707-14d5-4c53-9a0a-86d8249d42cf-kube-api-access-jcl85\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.249861 4605 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.249942 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.257568 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-g6sp8"] Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.269962 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.270564 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.325084 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.327298 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-config-data" (OuterVolumeSpecName: "config-data") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.339958 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91f37707-14d5-4c53-9a0a-86d8249d42cf" (UID: "91f37707-14d5-4c53-9a0a-86d8249d42cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.351870 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p54d9\" (UniqueName: \"kubernetes.io/projected/db269c11-d648-45e6-baa2-ffa53799b1d6-kube-api-access-p54d9\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.351990 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-config-data\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.352012 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.352032 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-scripts\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.352153 4605 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.352164 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.352173 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f37707-14d5-4c53-9a0a-86d8249d42cf-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.453676 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-config-data\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.454539 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.454569 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-scripts\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.454767 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p54d9\" (UniqueName: \"kubernetes.io/projected/db269c11-d648-45e6-baa2-ffa53799b1d6-kube-api-access-p54d9\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.459426 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-scripts\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.460010 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-config-data\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.460520 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.474812 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p54d9\" (UniqueName: \"kubernetes.io/projected/db269c11-d648-45e6-baa2-ffa53799b1d6-kube-api-access-p54d9\") pod \"nova-cell1-cell-mapping-g6sp8\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.716701 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:35 crc kubenswrapper[4605]: I1001 14:05:35.965355 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="704f2039-49de-4306-a09a-9e726133ee40" path="/var/lib/kubelet/pods/704f2039-49de-4306-a09a-9e726133ee40/volumes" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.000619 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65f69992-26f8-4f45-beeb-3aee499bf19e","Type":"ContainerStarted","Data":"5c79797f02d4fb162eee6159e4e768992c6e85dd7b561ffe254139dc09a015f9"} Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.000660 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65f69992-26f8-4f45-beeb-3aee499bf19e","Type":"ContainerStarted","Data":"d97594c5ab5cd3ff7e18537c5071f49d81a9eb5cd7ca3feae668dbb5ca0ee440"} Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.005618 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.006148 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91f37707-14d5-4c53-9a0a-86d8249d42cf","Type":"ContainerDied","Data":"c918fb4b172821262637a81f4d4b4617b72a37b8bf0978fb4face9bae710f424"} Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.006185 4605 scope.go:117] "RemoveContainer" containerID="c78d9bfa7d34ba0bc5ef17adc381e76099de52cd66a4b4d4adb8eb12967926ae" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.031590 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.031572034 podStartE2EDuration="3.031572034s" podCreationTimestamp="2025-10-01 14:05:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:36.026546227 +0000 UTC m=+1258.770522515" watchObservedRunningTime="2025-10-01 14:05:36.031572034 +0000 UTC m=+1258.775548242" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.044697 4605 scope.go:117] "RemoveContainer" containerID="6ca09ee632ed58998ef472386027b7e242212810aa3dae0a0f13278563b4a6b6" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.064531 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.068791 4605 scope.go:117] "RemoveContainer" containerID="605cbd1ab37eeb3b0c8b476382eb418eb1fce8f12a23399a2451c10c36397891" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.074974 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.084782 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.093356 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.097280 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.097532 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.097543 4605 scope.go:117] "RemoveContainer" containerID="d0c4561f6b101474ce082218a2af0c419cdd40f340e43c26ce7a8beacbfbc15c" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.097668 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.109913 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.197347 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-g6sp8"] Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.277990 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278334 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be76fa59-fd75-498e-9168-fa355659b827-log-httpd\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278363 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be76fa59-fd75-498e-9168-fa355659b827-run-httpd\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278380 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr5dm\" (UniqueName: \"kubernetes.io/projected/be76fa59-fd75-498e-9168-fa355659b827-kube-api-access-zr5dm\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278441 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-config-data\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278457 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278524 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.278577 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-scripts\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380368 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380474 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-scripts\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380512 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380538 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be76fa59-fd75-498e-9168-fa355659b827-log-httpd\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380558 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be76fa59-fd75-498e-9168-fa355659b827-run-httpd\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380576 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr5dm\" (UniqueName: \"kubernetes.io/projected/be76fa59-fd75-498e-9168-fa355659b827-kube-api-access-zr5dm\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380622 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-config-data\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.380636 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.381357 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/be76fa59-fd75-498e-9168-fa355659b827-log-httpd\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.382436 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be76fa59-fd75-498e-9168-fa355659b827-run-httpd\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.387561 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.388530 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.396212 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-scripts\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.396816 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-config-data\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.397133 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be76fa59-fd75-498e-9168-fa355659b827-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.401693 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr5dm\" (UniqueName: \"kubernetes.io/projected/be76fa59-fd75-498e-9168-fa355659b827-kube-api-access-zr5dm\") pod \"ceilometer-0\" (UID: \"be76fa59-fd75-498e-9168-fa355659b827\") " pod="openstack/ceilometer-0" Oct 01 14:05:36 crc kubenswrapper[4605]: I1001 14:05:36.475850 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.016038 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-g6sp8" event={"ID":"db269c11-d648-45e6-baa2-ffa53799b1d6","Type":"ContainerStarted","Data":"9ba34209f48d3a15cd7dc1488e53fa06fddb53fd75a3123c9226f90490efabb3"} Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.016590 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-g6sp8" event={"ID":"db269c11-d648-45e6-baa2-ffa53799b1d6","Type":"ContainerStarted","Data":"c18e95028b75e38b9750f79d9f2b2b6bd2f9983c12a7e20bb30f9f6f85a3b0f2"} Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.032898 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-g6sp8" podStartSLOduration=2.032878237 podStartE2EDuration="2.032878237s" podCreationTimestamp="2025-10-01 14:05:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:37.031465092 +0000 UTC m=+1259.775441310" watchObservedRunningTime="2025-10-01 14:05:37.032878237 +0000 UTC m=+1259.776854445" Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.445331 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.500390 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-llpx4"] Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.500619 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerName="dnsmasq-dns" containerID="cri-o://0b7f92f09775f039e8efb146ca61866701e3a075f2a33935a753725fa9744c1b" gracePeriod=10 Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.693996 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.736589 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:05:37 crc kubenswrapper[4605]: I1001 14:05:37.999389 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91f37707-14d5-4c53-9a0a-86d8249d42cf" path="/var/lib/kubelet/pods/91f37707-14d5-4c53-9a0a-86d8249d42cf/volumes" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.046222 4605 generic.go:334] "Generic (PLEG): container finished" podID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerID="0b7f92f09775f039e8efb146ca61866701e3a075f2a33935a753725fa9744c1b" exitCode=0 Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.046316 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" event={"ID":"44a71971-a13c-48ae-b60f-e0fb01c4a7f9","Type":"ContainerDied","Data":"0b7f92f09775f039e8efb146ca61866701e3a075f2a33935a753725fa9744c1b"} Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.052673 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be76fa59-fd75-498e-9168-fa355659b827","Type":"ContainerStarted","Data":"a76423c695e8cf71cd94f8068d8aee5ffe995a7295c086f58b8c9a36824b1f06"} Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.191928 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.329377 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75wqd\" (UniqueName: \"kubernetes.io/projected/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-kube-api-access-75wqd\") pod \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.329501 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-sb\") pod \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.329555 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-nb\") pod \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.329648 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-svc\") pod \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.329727 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-swift-storage-0\") pod \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.330116 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-config\") pod \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\" (UID: \"44a71971-a13c-48ae-b60f-e0fb01c4a7f9\") " Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.358508 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-kube-api-access-75wqd" (OuterVolumeSpecName: "kube-api-access-75wqd") pod "44a71971-a13c-48ae-b60f-e0fb01c4a7f9" (UID: "44a71971-a13c-48ae-b60f-e0fb01c4a7f9"). InnerVolumeSpecName "kube-api-access-75wqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.434325 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75wqd\" (UniqueName: \"kubernetes.io/projected/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-kube-api-access-75wqd\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.485154 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "44a71971-a13c-48ae-b60f-e0fb01c4a7f9" (UID: "44a71971-a13c-48ae-b60f-e0fb01c4a7f9"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.538165 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.603681 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "44a71971-a13c-48ae-b60f-e0fb01c4a7f9" (UID: "44a71971-a13c-48ae-b60f-e0fb01c4a7f9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.614624 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "44a71971-a13c-48ae-b60f-e0fb01c4a7f9" (UID: "44a71971-a13c-48ae-b60f-e0fb01c4a7f9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.632945 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "44a71971-a13c-48ae-b60f-e0fb01c4a7f9" (UID: "44a71971-a13c-48ae-b60f-e0fb01c4a7f9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.635655 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-config" (OuterVolumeSpecName: "config") pod "44a71971-a13c-48ae-b60f-e0fb01c4a7f9" (UID: "44a71971-a13c-48ae-b60f-e0fb01c4a7f9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.640560 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.640697 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.640752 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:38 crc kubenswrapper[4605]: I1001 14:05:38.640803 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44a71971-a13c-48ae-b60f-e0fb01c4a7f9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.062071 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be76fa59-fd75-498e-9168-fa355659b827","Type":"ContainerStarted","Data":"55473816e9f3f1707a75cf86ab8e7fd3aa2f09935f5a1c1be9ab6d2e54606542"} Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.064653 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" event={"ID":"44a71971-a13c-48ae-b60f-e0fb01c4a7f9","Type":"ContainerDied","Data":"848bb64d32647c3f17013a7b172fb7040018d4b78054df108ec1859cd1535035"} Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.064714 4605 scope.go:117] "RemoveContainer" containerID="0b7f92f09775f039e8efb146ca61866701e3a075f2a33935a753725fa9744c1b" Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.064727 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-llpx4" Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.125527 4605 scope.go:117] "RemoveContainer" containerID="2288a5677210c3bebf2c134d0b5a6afe678df817037887e1305f37324afb2299" Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.140242 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-llpx4"] Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.160329 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-llpx4"] Oct 01 14:05:39 crc kubenswrapper[4605]: I1001 14:05:39.944062 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" path="/var/lib/kubelet/pods/44a71971-a13c-48ae-b60f-e0fb01c4a7f9/volumes" Oct 01 14:05:40 crc kubenswrapper[4605]: I1001 14:05:40.082801 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be76fa59-fd75-498e-9168-fa355659b827","Type":"ContainerStarted","Data":"09b455b8b5cba42553ecbb634f236e7dbde60449658a1e1107dd662a04f5b052"} Oct 01 14:05:40 crc kubenswrapper[4605]: I1001 14:05:40.082846 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be76fa59-fd75-498e-9168-fa355659b827","Type":"ContainerStarted","Data":"defa8d6c75cf9e917e9a9735a3e392f0dfa9fbfa077bbde049680deaa1d6968d"} Oct 01 14:05:42 crc kubenswrapper[4605]: I1001 14:05:42.102562 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be76fa59-fd75-498e-9168-fa355659b827","Type":"ContainerStarted","Data":"01acfd1f3f8ed28e55d4f7ab46044f4485d360df9a571ad4c15e9f51e231f333"} Oct 01 14:05:42 crc kubenswrapper[4605]: I1001 14:05:42.103229 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 01 14:05:43 crc kubenswrapper[4605]: I1001 14:05:43.111797 4605 generic.go:334] "Generic (PLEG): container finished" podID="db269c11-d648-45e6-baa2-ffa53799b1d6" containerID="9ba34209f48d3a15cd7dc1488e53fa06fddb53fd75a3123c9226f90490efabb3" exitCode=0 Oct 01 14:05:43 crc kubenswrapper[4605]: I1001 14:05:43.111976 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-g6sp8" event={"ID":"db269c11-d648-45e6-baa2-ffa53799b1d6","Type":"ContainerDied","Data":"9ba34209f48d3a15cd7dc1488e53fa06fddb53fd75a3123c9226f90490efabb3"} Oct 01 14:05:43 crc kubenswrapper[4605]: I1001 14:05:43.140333 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.83418584 podStartE2EDuration="7.140316351s" podCreationTimestamp="2025-10-01 14:05:36 +0000 UTC" firstStartedPulling="2025-10-01 14:05:37.736359655 +0000 UTC m=+1260.480335863" lastFinishedPulling="2025-10-01 14:05:41.042490166 +0000 UTC m=+1263.786466374" observedRunningTime="2025-10-01 14:05:42.131810525 +0000 UTC m=+1264.875786733" watchObservedRunningTime="2025-10-01 14:05:43.140316351 +0000 UTC m=+1265.884292559" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.272927 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.283632 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.283724 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-metadata-0" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.350688 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.351698 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.546845 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.656195 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-config-data\") pod \"db269c11-d648-45e6-baa2-ffa53799b1d6\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.656459 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-combined-ca-bundle\") pod \"db269c11-d648-45e6-baa2-ffa53799b1d6\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.656545 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-scripts\") pod \"db269c11-d648-45e6-baa2-ffa53799b1d6\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.657321 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p54d9\" (UniqueName: \"kubernetes.io/projected/db269c11-d648-45e6-baa2-ffa53799b1d6-kube-api-access-p54d9\") pod \"db269c11-d648-45e6-baa2-ffa53799b1d6\" (UID: \"db269c11-d648-45e6-baa2-ffa53799b1d6\") " Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.664241 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db269c11-d648-45e6-baa2-ffa53799b1d6-kube-api-access-p54d9" (OuterVolumeSpecName: "kube-api-access-p54d9") pod "db269c11-d648-45e6-baa2-ffa53799b1d6" (UID: "db269c11-d648-45e6-baa2-ffa53799b1d6"). InnerVolumeSpecName "kube-api-access-p54d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.664933 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-scripts" (OuterVolumeSpecName: "scripts") pod "db269c11-d648-45e6-baa2-ffa53799b1d6" (UID: "db269c11-d648-45e6-baa2-ffa53799b1d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.698716 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-config-data" (OuterVolumeSpecName: "config-data") pod "db269c11-d648-45e6-baa2-ffa53799b1d6" (UID: "db269c11-d648-45e6-baa2-ffa53799b1d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.712247 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db269c11-d648-45e6-baa2-ffa53799b1d6" (UID: "db269c11-d648-45e6-baa2-ffa53799b1d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.759531 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.759567 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.759579 4605 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db269c11-d648-45e6-baa2-ffa53799b1d6-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:44 crc kubenswrapper[4605]: I1001 14:05:44.759590 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p54d9\" (UniqueName: \"kubernetes.io/projected/db269c11-d648-45e6-baa2-ffa53799b1d6-kube-api-access-p54d9\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.131636 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-g6sp8" Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.132407 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-g6sp8" event={"ID":"db269c11-d648-45e6-baa2-ffa53799b1d6","Type":"ContainerDied","Data":"c18e95028b75e38b9750f79d9f2b2b6bd2f9983c12a7e20bb30f9f6f85a3b0f2"} Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.132627 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c18e95028b75e38b9750f79d9f2b2b6bd2f9983c12a7e20bb30f9f6f85a3b0f2" Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.173787 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.338477 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.363268 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.363268 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.389264 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.389479 4605 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="528d903b-d9c0-442c-b3c3-8b576405a91b" containerName="nova-scheduler-scheduler" containerID="cri-o://a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5" gracePeriod=30 Oct 01 14:05:45 crc kubenswrapper[4605]: I1001 14:05:45.407619 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:46 crc kubenswrapper[4605]: I1001 14:05:46.140526 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-log" containerID="cri-o://d97594c5ab5cd3ff7e18537c5071f49d81a9eb5cd7ca3feae668dbb5ca0ee440" gracePeriod=30 Oct 01 14:05:46 crc kubenswrapper[4605]: I1001 14:05:46.141067 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-api" containerID="cri-o://5c79797f02d4fb162eee6159e4e768992c6e85dd7b561ffe254139dc09a015f9" gracePeriod=30 Oct 01 14:05:46 crc kubenswrapper[4605]: E1001 14:05:46.311697 4605 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 14:05:46 crc kubenswrapper[4605]: E1001 14:05:46.314688 4605 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 14:05:46 crc kubenswrapper[4605]: E1001 14:05:46.317793 4605 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 01 14:05:46 crc kubenswrapper[4605]: E1001 14:05:46.317842 4605 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="528d903b-d9c0-442c-b3c3-8b576405a91b" containerName="nova-scheduler-scheduler" Oct 01 14:05:47 crc kubenswrapper[4605]: I1001 14:05:47.150641 4605 generic.go:334] "Generic (PLEG): container finished" podID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerID="d97594c5ab5cd3ff7e18537c5071f49d81a9eb5cd7ca3feae668dbb5ca0ee440" exitCode=143 Oct 01 14:05:47 crc kubenswrapper[4605]: I1001 14:05:47.150706 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65f69992-26f8-4f45-beeb-3aee499bf19e","Type":"ContainerDied","Data":"d97594c5ab5cd3ff7e18537c5071f49d81a9eb5cd7ca3feae668dbb5ca0ee440"} Oct 01 14:05:47 crc kubenswrapper[4605]: I1001 14:05:47.151724 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-log" 
containerID="cri-o://78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7" gracePeriod=30 Oct 01 14:05:47 crc kubenswrapper[4605]: I1001 14:05:47.151810 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-metadata" containerID="cri-o://a149e587cbddcef7a8848568bb4dfd43e0b6ad7f2522459bf8b23d76e2bfb58a" gracePeriod=30 Oct 01 14:05:47 crc kubenswrapper[4605]: E1001 14:05:47.377038 4605 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ed8f49c_0b65_433b_b5a9_848e4e920b86.slice/crio-conmon-78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ed8f49c_0b65_433b_b5a9_848e4e920b86.slice/crio-78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7.scope\": RecentStats: unable to find data in memory cache]" Oct 01 14:05:48 crc kubenswrapper[4605]: I1001 14:05:48.162167 4605 generic.go:334] "Generic (PLEG): container finished" podID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerID="78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7" exitCode=143 Oct 01 14:05:48 crc kubenswrapper[4605]: I1001 14:05:48.162264 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ed8f49c-0b65-433b-b5a9-848e4e920b86","Type":"ContainerDied","Data":"78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7"} Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.182677 4605 generic.go:334] "Generic (PLEG): container finished" podID="528d903b-d9c0-442c-b3c3-8b576405a91b" containerID="a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5" exitCode=0 Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.182756 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"528d903b-d9c0-442c-b3c3-8b576405a91b","Type":"ContainerDied","Data":"a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5"} Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.477025 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.479494 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-combined-ca-bundle\") pod \"528d903b-d9c0-442c-b3c3-8b576405a91b\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.479636 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nclzg\" (UniqueName: \"kubernetes.io/projected/528d903b-d9c0-442c-b3c3-8b576405a91b-kube-api-access-nclzg\") pod \"528d903b-d9c0-442c-b3c3-8b576405a91b\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.479679 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-config-data\") pod \"528d903b-d9c0-442c-b3c3-8b576405a91b\" (UID: \"528d903b-d9c0-442c-b3c3-8b576405a91b\") " Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.489849 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/528d903b-d9c0-442c-b3c3-8b576405a91b-kube-api-access-nclzg" (OuterVolumeSpecName: "kube-api-access-nclzg") pod "528d903b-d9c0-442c-b3c3-8b576405a91b" (UID: "528d903b-d9c0-442c-b3c3-8b576405a91b"). InnerVolumeSpecName "kube-api-access-nclzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.526772 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-config-data" (OuterVolumeSpecName: "config-data") pod "528d903b-d9c0-442c-b3c3-8b576405a91b" (UID: "528d903b-d9c0-442c-b3c3-8b576405a91b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.529457 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "528d903b-d9c0-442c-b3c3-8b576405a91b" (UID: "528d903b-d9c0-442c-b3c3-8b576405a91b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.580834 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.581055 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nclzg\" (UniqueName: \"kubernetes.io/projected/528d903b-d9c0-442c-b3c3-8b576405a91b-kube-api-access-nclzg\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.581174 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/528d903b-d9c0-442c-b3c3-8b576405a91b-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.843684 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:34868->10.217.0.198:8775: read: connection reset by peer" Oct 01 14:05:50 crc kubenswrapper[4605]: I1001 14:05:50.879210 4605 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:34856->10.217.0.198:8775: read: connection reset by peer" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.223866 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"528d903b-d9c0-442c-b3c3-8b576405a91b","Type":"ContainerDied","Data":"5da4b6aabac6f5d537cbbc9852e2dd69709ebbb5b4631520ab7525a800f083a2"} Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.224333 4605 scope.go:117] "RemoveContainer" containerID="a44a8367b80b3f0f87ef664c54f17edc590f66e9a7a67748cacc80e98df85ce5" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.224515 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.231508 4605 generic.go:334] "Generic (PLEG): container finished" podID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerID="a149e587cbddcef7a8848568bb4dfd43e0b6ad7f2522459bf8b23d76e2bfb58a" exitCode=0 Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.231566 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ed8f49c-0b65-433b-b5a9-848e4e920b86","Type":"ContainerDied","Data":"a149e587cbddcef7a8848568bb4dfd43e0b6ad7f2522459bf8b23d76e2bfb58a"} Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.340053 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.352868 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.360217 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377261 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:51 crc kubenswrapper[4605]: E1001 14:05:51.377739 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-metadata" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377761 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-metadata" Oct 01 14:05:51 crc kubenswrapper[4605]: E1001 14:05:51.377776 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="528d903b-d9c0-442c-b3c3-8b576405a91b" containerName="nova-scheduler-scheduler" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377785 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="528d903b-d9c0-442c-b3c3-8b576405a91b" containerName="nova-scheduler-scheduler" Oct 01 14:05:51 crc kubenswrapper[4605]: E1001 14:05:51.377796 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-log" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377804 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-log" Oct 01 14:05:51 crc kubenswrapper[4605]: E1001 14:05:51.377823 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db269c11-d648-45e6-baa2-ffa53799b1d6" containerName="nova-manage" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377832 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="db269c11-d648-45e6-baa2-ffa53799b1d6" containerName="nova-manage" Oct 01 14:05:51 crc kubenswrapper[4605]: E1001 14:05:51.377849 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerName="init" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377857 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerName="init" Oct 01 14:05:51 crc kubenswrapper[4605]: E1001 14:05:51.377876 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerName="dnsmasq-dns" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.377883 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerName="dnsmasq-dns" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.378129 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="44a71971-a13c-48ae-b60f-e0fb01c4a7f9" containerName="dnsmasq-dns" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.378152 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="528d903b-d9c0-442c-b3c3-8b576405a91b" containerName="nova-scheduler-scheduler" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.378183 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-metadata" Oct 01 14:05:51 crc kubenswrapper[4605]: 
I1001 14:05:51.378200 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" containerName="nova-metadata-log" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.378217 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="db269c11-d648-45e6-baa2-ffa53799b1d6" containerName="nova-manage" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.379489 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.393223 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.393378 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-nova-metadata-tls-certs\") pod \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.393798 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-config-data\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.393856 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.393883 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.393907 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt4hc\" (UniqueName: \"kubernetes.io/projected/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-kube-api-access-qt4hc\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.438785 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4ed8f49c-0b65-433b-b5a9-848e4e920b86" (UID: "4ed8f49c-0b65-433b-b5a9-848e4e920b86"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.495901 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt8nv\" (UniqueName: \"kubernetes.io/projected/4ed8f49c-0b65-433b-b5a9-848e4e920b86-kube-api-access-qt8nv\") pod \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.496285 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-config-data\") pod \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.496444 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-combined-ca-bundle\") pod \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.496544 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ed8f49c-0b65-433b-b5a9-848e4e920b86-logs\") pod \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\" (UID: \"4ed8f49c-0b65-433b-b5a9-848e4e920b86\") " Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.496957 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-config-data\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.497129 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.497210 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt4hc\" (UniqueName: \"kubernetes.io/projected/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-kube-api-access-qt4hc\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.497382 4605 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.497424 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ed8f49c-0b65-433b-b5a9-848e4e920b86-logs" (OuterVolumeSpecName: "logs") pod "4ed8f49c-0b65-433b-b5a9-848e4e920b86" (UID: "4ed8f49c-0b65-433b-b5a9-848e4e920b86"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.500933 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ed8f49c-0b65-433b-b5a9-848e4e920b86-kube-api-access-qt8nv" (OuterVolumeSpecName: "kube-api-access-qt8nv") pod "4ed8f49c-0b65-433b-b5a9-848e4e920b86" (UID: "4ed8f49c-0b65-433b-b5a9-848e4e920b86"). InnerVolumeSpecName "kube-api-access-qt8nv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.503863 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.504604 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-config-data\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.527874 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ed8f49c-0b65-433b-b5a9-848e4e920b86" (UID: "4ed8f49c-0b65-433b-b5a9-848e4e920b86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.528771 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt4hc\" (UniqueName: \"kubernetes.io/projected/3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927-kube-api-access-qt4hc\") pod \"nova-scheduler-0\" (UID: \"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927\") " pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.540398 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-config-data" (OuterVolumeSpecName: "config-data") pod "4ed8f49c-0b65-433b-b5a9-848e4e920b86" (UID: "4ed8f49c-0b65-433b-b5a9-848e4e920b86"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.598338 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt8nv\" (UniqueName: \"kubernetes.io/projected/4ed8f49c-0b65-433b-b5a9-848e4e920b86-kube-api-access-qt8nv\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.598603 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.598682 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ed8f49c-0b65-433b-b5a9-848e4e920b86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.598751 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ed8f49c-0b65-433b-b5a9-848e4e920b86-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.697145 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 01 14:05:51 crc kubenswrapper[4605]: I1001 14:05:51.945113 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="528d903b-d9c0-442c-b3c3-8b576405a91b" path="/var/lib/kubelet/pods/528d903b-d9c0-442c-b3c3-8b576405a91b/volumes" Oct 01 14:05:52 crc kubenswrapper[4605]: W1001 14:05:52.188498 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c7bfc3c_d34b_4bcf_ba1c_71ed87c74927.slice/crio-3355394d2d6471a395e98ea371c8e392be15e6cbf92fac660e8af3d5816a4e22 WatchSource:0}: Error finding container 3355394d2d6471a395e98ea371c8e392be15e6cbf92fac660e8af3d5816a4e22: Status 404 returned error can't find the container with id 3355394d2d6471a395e98ea371c8e392be15e6cbf92fac660e8af3d5816a4e22 Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.193506 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.250172 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ed8f49c-0b65-433b-b5a9-848e4e920b86","Type":"ContainerDied","Data":"fe18d7499b8ec28a1b3d62220f6ff473ec2d955ec1062e4032d5d501c576e3d7"} Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.250224 4605 scope.go:117] "RemoveContainer" containerID="a149e587cbddcef7a8848568bb4dfd43e0b6ad7f2522459bf8b23d76e2bfb58a" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.250316 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.255337 4605 generic.go:334] "Generic (PLEG): container finished" podID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerID="5c79797f02d4fb162eee6159e4e768992c6e85dd7b561ffe254139dc09a015f9" exitCode=0 Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.255400 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65f69992-26f8-4f45-beeb-3aee499bf19e","Type":"ContainerDied","Data":"5c79797f02d4fb162eee6159e4e768992c6e85dd7b561ffe254139dc09a015f9"} Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.255423 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65f69992-26f8-4f45-beeb-3aee499bf19e","Type":"ContainerDied","Data":"52701961898fd0e6255b2f328591822358003359f44524b105ac32904ed1bba8"} Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.255433 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52701961898fd0e6255b2f328591822358003359f44524b105ac32904ed1bba8" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.256506 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927","Type":"ContainerStarted","Data":"3355394d2d6471a395e98ea371c8e392be15e6cbf92fac660e8af3d5816a4e22"} Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.303351 4605 scope.go:117] "RemoveContainer" containerID="78dae040228a1a7aafd3a86985cc81a66e35f3ecc520d4f57f5d67343d0cfff7" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.315377 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.336375 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.352037 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.361703 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:52 crc kubenswrapper[4605]: E1001 14:05:52.362168 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-log" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.362191 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-log" Oct 01 14:05:52 crc kubenswrapper[4605]: E1001 14:05:52.362239 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-api" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.362245 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-api" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.362409 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-log" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.362426 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" containerName="nova-api-api" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.363439 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.366734 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.367003 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.383561 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.414028 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.414488 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f69992-26f8-4f45-beeb-3aee499bf19e-logs\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.414698 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-combined-ca-bundle\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.414845 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-public-tls-certs\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.414939 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-config-data\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.415228 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vxl8\" (UniqueName: \"kubernetes.io/projected/65f69992-26f8-4f45-beeb-3aee499bf19e-kube-api-access-6vxl8\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.414996 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65f69992-26f8-4f45-beeb-3aee499bf19e-logs" (OuterVolumeSpecName: "logs") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.426865 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f69992-26f8-4f45-beeb-3aee499bf19e-kube-api-access-6vxl8" (OuterVolumeSpecName: "kube-api-access-6vxl8") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "kube-api-access-6vxl8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.497922 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-config-data" (OuterVolumeSpecName: "config-data") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.498308 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.516768 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.516992 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs\") pod \"65f69992-26f8-4f45-beeb-3aee499bf19e\" (UID: \"65f69992-26f8-4f45-beeb-3aee499bf19e\") " Oct 01 14:05:52 crc kubenswrapper[4605]: W1001 14:05:52.517155 4605 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/65f69992-26f8-4f45-beeb-3aee499bf19e/volumes/kubernetes.io~secret/internal-tls-certs Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517173 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517476 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z79v\" (UniqueName: \"kubernetes.io/projected/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-kube-api-access-9z79v\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517593 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-config-data\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517662 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517735 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-logs\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517802 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517907 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517930 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vxl8\" (UniqueName: \"kubernetes.io/projected/65f69992-26f8-4f45-beeb-3aee499bf19e-kube-api-access-6vxl8\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517942 4605 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517953 4605 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f69992-26f8-4f45-beeb-3aee499bf19e-logs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.517963 4605 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.524557 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-public-tls-certs" (OuterVolumeSpecName: 
"public-tls-certs") pod "65f69992-26f8-4f45-beeb-3aee499bf19e" (UID: "65f69992-26f8-4f45-beeb-3aee499bf19e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.619846 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.619978 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z79v\" (UniqueName: \"kubernetes.io/projected/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-kube-api-access-9z79v\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.620056 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-config-data\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.620087 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.620175 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-logs\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.620259 4605 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65f69992-26f8-4f45-beeb-3aee499bf19e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.620582 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-logs\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.623714 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.623877 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-config-data\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.623973 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.638596 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z79v\" (UniqueName: \"kubernetes.io/projected/05f7dd13-a8f8-4263-ad0f-87d5972c6eb0-kube-api-access-9z79v\") pod \"nova-metadata-0\" (UID: \"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0\") " pod="openstack/nova-metadata-0" Oct 01 14:05:52 crc kubenswrapper[4605]: I1001 14:05:52.684423 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.139226 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 01 14:05:53 crc kubenswrapper[4605]: W1001 14:05:53.141374 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05f7dd13_a8f8_4263_ad0f_87d5972c6eb0.slice/crio-f26f2c9d86da579a51669f6bce8fe030d6efe2af289c3d5c06426b1d7c35d25e WatchSource:0}: Error finding container f26f2c9d86da579a51669f6bce8fe030d6efe2af289c3d5c06426b1d7c35d25e: Status 404 returned error can't find the container with id f26f2c9d86da579a51669f6bce8fe030d6efe2af289c3d5c06426b1d7c35d25e Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.291204 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927","Type":"ContainerStarted","Data":"161b2faebecacc39d17ac9c911de17a4f31c6d199901f29a7874433b2f4d6ea6"} Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.294880 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0","Type":"ContainerStarted","Data":"f26f2c9d86da579a51669f6bce8fe030d6efe2af289c3d5c06426b1d7c35d25e"} Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.294923 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.321476 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.321327909 podStartE2EDuration="2.321327909s" podCreationTimestamp="2025-10-01 14:05:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:53.309662615 +0000 UTC m=+1276.053638813" watchObservedRunningTime="2025-10-01 14:05:53.321327909 +0000 UTC m=+1276.065304117" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.345423 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.357302 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.371244 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.375025 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.383269 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.383383 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.384809 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.385133 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.540224 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-config-data\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.540279 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-internal-tls-certs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.540536 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-249v4\" (UniqueName: \"kubernetes.io/projected/885f1ef5-027b-49b0-9c25-444a307d3075-kube-api-access-249v4\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.540594 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-public-tls-certs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.540656 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.540736 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885f1ef5-027b-49b0-9c25-444a307d3075-logs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.642258 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.642367 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885f1ef5-027b-49b0-9c25-444a307d3075-logs\") pod \"nova-api-0\" (UID: 
\"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.642429 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-config-data\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.642457 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-internal-tls-certs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.642517 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-249v4\" (UniqueName: \"kubernetes.io/projected/885f1ef5-027b-49b0-9c25-444a307d3075-kube-api-access-249v4\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.642583 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-public-tls-certs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.643817 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885f1ef5-027b-49b0-9c25-444a307d3075-logs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.646771 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-public-tls-certs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.648044 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-internal-tls-certs\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.648936 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.651119 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885f1ef5-027b-49b0-9c25-444a307d3075-config-data\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.661197 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-249v4\" (UniqueName: \"kubernetes.io/projected/885f1ef5-027b-49b0-9c25-444a307d3075-kube-api-access-249v4\") pod \"nova-api-0\" (UID: \"885f1ef5-027b-49b0-9c25-444a307d3075\") " 
pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.708398 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.942812 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ed8f49c-0b65-433b-b5a9-848e4e920b86" path="/var/lib/kubelet/pods/4ed8f49c-0b65-433b-b5a9-848e4e920b86/volumes" Oct 01 14:05:53 crc kubenswrapper[4605]: I1001 14:05:53.944046 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f69992-26f8-4f45-beeb-3aee499bf19e" path="/var/lib/kubelet/pods/65f69992-26f8-4f45-beeb-3aee499bf19e/volumes" Oct 01 14:05:54 crc kubenswrapper[4605]: I1001 14:05:54.171831 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 01 14:05:54 crc kubenswrapper[4605]: W1001 14:05:54.175041 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod885f1ef5_027b_49b0_9c25_444a307d3075.slice/crio-249c70aab4d5acf5d9b1d85d2b8b87fc6c2caa002e8a73e337ccb8fb66cea927 WatchSource:0}: Error finding container 249c70aab4d5acf5d9b1d85d2b8b87fc6c2caa002e8a73e337ccb8fb66cea927: Status 404 returned error can't find the container with id 249c70aab4d5acf5d9b1d85d2b8b87fc6c2caa002e8a73e337ccb8fb66cea927 Oct 01 14:05:54 crc kubenswrapper[4605]: I1001 14:05:54.316352 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0","Type":"ContainerStarted","Data":"2614d68d3d03dd5bae55c249a9e787b9a34ee757499b91bb91621bb75c84f6ea"} Oct 01 14:05:54 crc kubenswrapper[4605]: I1001 14:05:54.316400 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"05f7dd13-a8f8-4263-ad0f-87d5972c6eb0","Type":"ContainerStarted","Data":"2b050c763514e9e5f715216d8f2cf5c87786ebadbe90a8b5b1fbc403f16438bb"} Oct 01 14:05:54 crc kubenswrapper[4605]: I1001 14:05:54.323451 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"885f1ef5-027b-49b0-9c25-444a307d3075","Type":"ContainerStarted","Data":"249c70aab4d5acf5d9b1d85d2b8b87fc6c2caa002e8a73e337ccb8fb66cea927"} Oct 01 14:05:55 crc kubenswrapper[4605]: I1001 14:05:55.333839 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"885f1ef5-027b-49b0-9c25-444a307d3075","Type":"ContainerStarted","Data":"1ffaae10c9f6520f8c39ee8405b40931050398d8ba2f27b13597d566890b5296"} Oct 01 14:05:55 crc kubenswrapper[4605]: I1001 14:05:55.334254 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"885f1ef5-027b-49b0-9c25-444a307d3075","Type":"ContainerStarted","Data":"a917ff482a1fbe4ce58eede0104752c28c83084c2a986a5458855deb9874f304"} Oct 01 14:05:55 crc kubenswrapper[4605]: I1001 14:05:55.358527 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.358508441 podStartE2EDuration="3.358508441s" podCreationTimestamp="2025-10-01 14:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:54.341283615 +0000 UTC m=+1277.085259823" watchObservedRunningTime="2025-10-01 14:05:55.358508441 +0000 UTC m=+1278.102484649" Oct 01 14:05:55 crc kubenswrapper[4605]: I1001 14:05:55.363842 4605 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.363823356 podStartE2EDuration="2.363823356s" podCreationTimestamp="2025-10-01 14:05:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:05:55.354624913 +0000 UTC m=+1278.098601131" watchObservedRunningTime="2025-10-01 14:05:55.363823356 +0000 UTC m=+1278.107799554" Oct 01 14:05:56 crc kubenswrapper[4605]: I1001 14:05:56.698283 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 01 14:05:57 crc kubenswrapper[4605]: I1001 14:05:57.684698 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 14:05:57 crc kubenswrapper[4605]: I1001 14:05:57.685350 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 01 14:06:01 crc kubenswrapper[4605]: I1001 14:06:01.697783 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 01 14:06:01 crc kubenswrapper[4605]: I1001 14:06:01.726934 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 01 14:06:02 crc kubenswrapper[4605]: I1001 14:06:02.417304 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 01 14:06:02 crc kubenswrapper[4605]: I1001 14:06:02.685559 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 14:06:02 crc kubenswrapper[4605]: I1001 14:06:02.685595 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 01 14:06:03 crc kubenswrapper[4605]: I1001 14:06:03.701360 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="05f7dd13-a8f8-4263-ad0f-87d5972c6eb0" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:06:03 crc kubenswrapper[4605]: I1001 14:06:03.701381 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="05f7dd13-a8f8-4263-ad0f-87d5972c6eb0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:06:03 crc kubenswrapper[4605]: I1001 14:06:03.708883 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:06:03 crc kubenswrapper[4605]: I1001 14:06:03.708937 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 01 14:06:04 crc kubenswrapper[4605]: I1001 14:06:04.725296 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="885f1ef5-027b-49b0-9c25-444a307d3075" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 01 14:06:04 crc kubenswrapper[4605]: I1001 14:06:04.725365 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="885f1ef5-027b-49b0-9c25-444a307d3075" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled 
(Client.Timeout exceeded while awaiting headers)" Oct 01 14:06:06 crc kubenswrapper[4605]: I1001 14:06:06.489783 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 01 14:06:12 crc kubenswrapper[4605]: I1001 14:06:12.692591 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 01 14:06:12 crc kubenswrapper[4605]: I1001 14:06:12.695650 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 01 14:06:12 crc kubenswrapper[4605]: I1001 14:06:12.700889 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.492913 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.717867 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.718004 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.718762 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.718929 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.725646 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 14:06:13 crc kubenswrapper[4605]: I1001 14:06:13.728513 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 01 14:06:22 crc kubenswrapper[4605]: I1001 14:06:22.373728 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:06:23 crc kubenswrapper[4605]: I1001 14:06:23.883595 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:06:27 crc kubenswrapper[4605]: I1001 14:06:27.398760 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="rabbitmq" containerID="cri-o://a842dfd7b6d3c07d77db3601730bb1df84f7335badfbfd2b2c458020725e1c9a" gracePeriod=604795 Oct 01 14:06:28 crc kubenswrapper[4605]: I1001 14:06:28.406956 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerName="rabbitmq" containerID="cri-o://b63b48430f83f00e87cf9e6c01f9f12ea65be589b73cbf994253d1f72ebb0ee4" gracePeriod=604796 Oct 01 14:06:33 crc kubenswrapper[4605]: I1001 14:06:33.671201 4605 generic.go:334] "Generic (PLEG): container finished" podID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerID="a842dfd7b6d3c07d77db3601730bb1df84f7335badfbfd2b2c458020725e1c9a" exitCode=0 Oct 01 14:06:33 crc kubenswrapper[4605]: I1001 14:06:33.671325 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d753cd5e-e85d-424c-a439-2b51cbedf76f","Type":"ContainerDied","Data":"a842dfd7b6d3c07d77db3601730bb1df84f7335badfbfd2b2c458020725e1c9a"} Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.075509 4605 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194413 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d753cd5e-e85d-424c-a439-2b51cbedf76f-pod-info\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194472 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194589 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d753cd5e-e85d-424c-a439-2b51cbedf76f-erlang-cookie-secret\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194637 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-plugins\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194736 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfqwp\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-kube-api-access-qfqwp\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194806 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-erlang-cookie\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194835 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-tls\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194910 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-confd\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194946 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-config-data\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.194976 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-plugins-conf\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: 
\"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.195040 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-server-conf\") pod \"d753cd5e-e85d-424c-a439-2b51cbedf76f\" (UID: \"d753cd5e-e85d-424c-a439-2b51cbedf76f\") " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.199825 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.199862 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.202451 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d753cd5e-e85d-424c-a439-2b51cbedf76f-pod-info" (OuterVolumeSpecName: "pod-info") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.203920 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.205063 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-kube-api-access-qfqwp" (OuterVolumeSpecName: "kube-api-access-qfqwp") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "kube-api-access-qfqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.207995 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.209274 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.210999 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d753cd5e-e85d-424c-a439-2b51cbedf76f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.233247 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-config-data" (OuterVolumeSpecName: "config-data") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.282039 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-server-conf" (OuterVolumeSpecName: "server-conf") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299796 4605 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d753cd5e-e85d-424c-a439-2b51cbedf76f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299837 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299853 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfqwp\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-kube-api-access-qfqwp\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299867 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299881 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299893 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299906 4605 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299919 4605 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d753cd5e-e85d-424c-a439-2b51cbedf76f-server-conf\") on node \"crc\" DevicePath \"\"" Oct 01 
14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299931 4605 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d753cd5e-e85d-424c-a439-2b51cbedf76f-pod-info\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.299960 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.328434 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.339040 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d753cd5e-e85d-424c-a439-2b51cbedf76f" (UID: "d753cd5e-e85d-424c-a439-2b51cbedf76f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.401908 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d753cd5e-e85d-424c-a439-2b51cbedf76f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.401947 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.687402 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.687451 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d753cd5e-e85d-424c-a439-2b51cbedf76f","Type":"ContainerDied","Data":"0f0186c30fd5f9c10d53cd3308c5d4b358d554720efb502d06501244c360c800"} Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.687851 4605 scope.go:117] "RemoveContainer" containerID="a842dfd7b6d3c07d77db3601730bb1df84f7335badfbfd2b2c458020725e1c9a" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.705823 4605 generic.go:334] "Generic (PLEG): container finished" podID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerID="b63b48430f83f00e87cf9e6c01f9f12ea65be589b73cbf994253d1f72ebb0ee4" exitCode=0 Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.705868 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"18fdf98a-ad5b-4930-b8cc-2422242aac16","Type":"ContainerDied","Data":"b63b48430f83f00e87cf9e6c01f9f12ea65be589b73cbf994253d1f72ebb0ee4"} Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.734047 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.744600 4605 scope.go:117] "RemoveContainer" containerID="20882a754651de043ae04d6134122f8552e8704cf3c71bf28f4e3b8a9f4daab2" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.759780 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.791947 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:06:34 crc kubenswrapper[4605]: E1001 14:06:34.792583 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="rabbitmq" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.792600 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="rabbitmq" Oct 01 14:06:34 crc kubenswrapper[4605]: E1001 14:06:34.792620 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="setup-container" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.792628 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="setup-container" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.792842 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" containerName="rabbitmq" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.802193 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.805824 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.807337 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.812377 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.812614 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.812758 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.812894 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-vrnd4" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.813019 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.847521 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.922780 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b035ab1-17f0-4d9e-91d4-983b4cd06469-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.922878 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.922939 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-config-data\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.922997 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.923034 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.923048 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.927960 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.929240 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b035ab1-17f0-4d9e-91d4-983b4cd06469-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.929569 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgrsc\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-kube-api-access-xgrsc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.930025 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:34 crc kubenswrapper[4605]: I1001 14:06:34.930124 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.017138 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.031331 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.031374 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.031407 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.032020 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.032741 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035013 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035136 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b035ab1-17f0-4d9e-91d4-983b4cd06469-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035182 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgrsc\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-kube-api-access-xgrsc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035205 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035257 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035293 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b035ab1-17f0-4d9e-91d4-983b4cd06469-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035340 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035379 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-config-data\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035436 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035685 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.035807 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.036553 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b035ab1-17f0-4d9e-91d4-983b4cd06469-config-data\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.039078 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.039855 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.055255 4605 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1b035ab1-17f0-4d9e-91d4-983b4cd06469-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.055744 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1b035ab1-17f0-4d9e-91d4-983b4cd06469-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.081280 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgrsc\" (UniqueName: \"kubernetes.io/projected/1b035ab1-17f0-4d9e-91d4-983b4cd06469-kube-api-access-xgrsc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.095425 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"1b035ab1-17f0-4d9e-91d4-983b4cd06469\") " pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.137357 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18fdf98a-ad5b-4930-b8cc-2422242aac16-erlang-cookie-secret\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.137431 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-plugins\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.137560 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-confd\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.137675 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx5xn\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-kube-api-access-dx5xn\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.137729 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-plugins-conf\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.137751 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-tls\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 
14:06:35.137812 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18fdf98a-ad5b-4930-b8cc-2422242aac16-pod-info\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.138308 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.138342 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-server-conf\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.138425 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.138482 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.141848 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-config-data\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.141998 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-erlang-cookie\") pod \"18fdf98a-ad5b-4930-b8cc-2422242aac16\" (UID: \"18fdf98a-ad5b-4930-b8cc-2422242aac16\") " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.143385 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.143691 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.143818 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.143931 4605 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.144079 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.144318 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/18fdf98a-ad5b-4930-b8cc-2422242aac16-pod-info" (OuterVolumeSpecName: "pod-info") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.144642 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-kube-api-access-dx5xn" (OuterVolumeSpecName: "kube-api-access-dx5xn") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "kube-api-access-dx5xn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.145956 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18fdf98a-ad5b-4930-b8cc-2422242aac16-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.156716 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.178582 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-config-data" (OuterVolumeSpecName: "config-data") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.192795 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.195123 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.224996 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-server-conf" (OuterVolumeSpecName: "server-conf") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250379 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx5xn\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-kube-api-access-dx5xn\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250415 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250424 4605 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/18fdf98a-ad5b-4930-b8cc-2422242aac16-pod-info\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250433 4605 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-server-conf\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250442 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250454 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/18fdf98a-ad5b-4930-b8cc-2422242aac16-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250462 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.250470 4605 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/18fdf98a-ad5b-4930-b8cc-2422242aac16-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.300409 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "18fdf98a-ad5b-4930-b8cc-2422242aac16" (UID: "18fdf98a-ad5b-4930-b8cc-2422242aac16"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.352574 4605 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/18fdf98a-ad5b-4930-b8cc-2422242aac16-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.673048 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.751557 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"18fdf98a-ad5b-4930-b8cc-2422242aac16","Type":"ContainerDied","Data":"803101d80f0b191f7f7ec7b373785c442756cc9eeee4856ddf5b94aa5baea3e3"} Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.753078 4605 scope.go:117] "RemoveContainer" containerID="b63b48430f83f00e87cf9e6c01f9f12ea65be589b73cbf994253d1f72ebb0ee4" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.753323 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.765120 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b035ab1-17f0-4d9e-91d4-983b4cd06469","Type":"ContainerStarted","Data":"6272a9da4f4b3e769f75b903e92ee2dbeb3d716396e82439a82a229ddff8b832"} Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.796039 4605 scope.go:117] "RemoveContainer" containerID="53e43957de2357c15334e32f90fbcc4bc89714062d29d00f8c8a8718dccf0c53" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.815526 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.828301 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.854917 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:06:35 crc kubenswrapper[4605]: E1001 14:06:35.855398 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerName="setup-container" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.855420 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerName="setup-container" Oct 01 14:06:35 crc kubenswrapper[4605]: E1001 14:06:35.855462 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerName="rabbitmq" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.855470 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerName="rabbitmq" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.855697 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" containerName="rabbitmq" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.856775 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.862362 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.862768 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.863060 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.863341 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.863553 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.863754 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.863955 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-2w8nd" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.866453 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.943664 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18fdf98a-ad5b-4930-b8cc-2422242aac16" path="/var/lib/kubelet/pods/18fdf98a-ad5b-4930-b8cc-2422242aac16/volumes" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.944575 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d753cd5e-e85d-424c-a439-2b51cbedf76f" path="/var/lib/kubelet/pods/d753cd5e-e85d-424c-a439-2b51cbedf76f/volumes" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967118 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967158 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967461 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967511 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 
14:06:35.967544 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967587 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967648 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9d1c480b-5fd5-4134-913c-19381d8f4db4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967674 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967706 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967785 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndxfr\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-kube-api-access-ndxfr\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:35 crc kubenswrapper[4605]: I1001 14:06:35.967877 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9d1c480b-5fd5-4134-913c-19381d8f4db4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069251 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069351 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9d1c480b-5fd5-4134-913c-19381d8f4db4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 
14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069373 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069394 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069747 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069788 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.069865 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndxfr\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-kube-api-access-ndxfr\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.070280 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9d1c480b-5fd5-4134-913c-19381d8f4db4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.070304 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.071048 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.071079 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.071532 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.071827 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.071847 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.072445 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.072629 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.072717 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9d1c480b-5fd5-4134-913c-19381d8f4db4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.073707 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.074646 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9d1c480b-5fd5-4134-913c-19381d8f4db4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.078565 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.078936 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9d1c480b-5fd5-4134-913c-19381d8f4db4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.092054 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndxfr\" (UniqueName: \"kubernetes.io/projected/9d1c480b-5fd5-4134-913c-19381d8f4db4-kube-api-access-ndxfr\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.106159 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9d1c480b-5fd5-4134-913c-19381d8f4db4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.216253 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.683679 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 01 14:06:36 crc kubenswrapper[4605]: I1001 14:06:36.778436 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9d1c480b-5fd5-4134-913c-19381d8f4db4","Type":"ContainerStarted","Data":"5d86bb09ed8f43905d8dae70b86ce5158069c0c87ae3e5a3fdc0bf651ff40a17"} Oct 01 14:06:37 crc kubenswrapper[4605]: I1001 14:06:37.789341 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b035ab1-17f0-4d9e-91d4-983b4cd06469","Type":"ContainerStarted","Data":"23bf175371ff4e9f92937c13eb4b4d93c8bc210c7c5a9b0deff671efa4f37431"} Oct 01 14:06:37 crc kubenswrapper[4605]: I1001 14:06:37.869395 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-f565f"] Oct 01 14:06:37 crc kubenswrapper[4605]: I1001 14:06:37.873850 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:37 crc kubenswrapper[4605]: I1001 14:06:37.882863 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-f565f"] Oct 01 14:06:37 crc kubenswrapper[4605]: I1001 14:06:37.884566 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008263 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s86x\" (UniqueName: \"kubernetes.io/projected/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-kube-api-access-8s86x\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008315 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008348 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008369 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008386 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-config\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008437 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-svc\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.008478 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.110736 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s86x\" (UniqueName: \"kubernetes.io/projected/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-kube-api-access-8s86x\") pod 
\"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111025 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111190 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111319 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111422 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-config\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111568 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-svc\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111721 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.111994 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.112201 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.112367 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-config\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " 
pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.112443 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.112792 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-svc\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.113322 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.157514 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s86x\" (UniqueName: \"kubernetes.io/projected/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-kube-api-access-8s86x\") pod \"dnsmasq-dns-67b789f86c-f565f\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.202574 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.695112 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-f565f"] Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.824936 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-f565f" event={"ID":"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0","Type":"ContainerStarted","Data":"1c978df3dd1409ac4314459c26a3c7a3c62ec423c82f0866733a21ea5464a3d3"} Oct 01 14:06:38 crc kubenswrapper[4605]: I1001 14:06:38.827144 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9d1c480b-5fd5-4134-913c-19381d8f4db4","Type":"ContainerStarted","Data":"3e3ab4c356f64f37ce047535a62dced8d0256344c8fd1d926d7e2cb026406c3f"} Oct 01 14:06:39 crc kubenswrapper[4605]: I1001 14:06:39.836541 4605 generic.go:334] "Generic (PLEG): container finished" podID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerID="a819a0e5250c8f54476e649f588739381cfad5265f15930bdabc91a435a44402" exitCode=0 Oct 01 14:06:39 crc kubenswrapper[4605]: I1001 14:06:39.836651 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-f565f" event={"ID":"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0","Type":"ContainerDied","Data":"a819a0e5250c8f54476e649f588739381cfad5265f15930bdabc91a435a44402"} Oct 01 14:06:40 crc kubenswrapper[4605]: I1001 14:06:40.848719 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-f565f" event={"ID":"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0","Type":"ContainerStarted","Data":"395307f169c808f7a413f49e42aaa4d56003ad824edc53809a59d24f8117bbb5"} Oct 01 14:06:40 crc kubenswrapper[4605]: I1001 14:06:40.849257 4605 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:40 crc kubenswrapper[4605]: I1001 14:06:40.873072 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-f565f" podStartSLOduration=3.8730541670000003 podStartE2EDuration="3.873054167s" podCreationTimestamp="2025-10-01 14:06:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:06:40.86444935 +0000 UTC m=+1323.608425598" watchObservedRunningTime="2025-10-01 14:06:40.873054167 +0000 UTC m=+1323.617030375" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.204287 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.268220 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-nb6z5"] Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.268440 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerName="dnsmasq-dns" containerID="cri-o://c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd" gracePeriod=10 Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.459860 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bcf8b9d95-kpkj4"] Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.461783 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.475182 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bcf8b9d95-kpkj4"] Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.508083 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-openstack-edpm-ipam\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.508193 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-ovsdbserver-sb\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.508215 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g9w2\" (UniqueName: \"kubernetes.io/projected/cfe22295-abd7-4094-b93e-3fb24d38242c-kube-api-access-4g9w2\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.508238 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-ovsdbserver-nb\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc 
kubenswrapper[4605]: I1001 14:06:48.508309 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-dns-swift-storage-0\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.508336 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-dns-svc\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.508367 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-config\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.610116 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-dns-swift-storage-0\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.610159 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-dns-svc\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.610199 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-config\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.610228 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-openstack-edpm-ipam\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.610302 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-ovsdbserver-sb\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.610324 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g9w2\" (UniqueName: \"kubernetes.io/projected/cfe22295-abd7-4094-b93e-3fb24d38242c-kube-api-access-4g9w2\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc 
kubenswrapper[4605]: I1001 14:06:48.610348 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-ovsdbserver-nb\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.611215 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-ovsdbserver-nb\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.611826 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-dns-swift-storage-0\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.611932 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-openstack-edpm-ipam\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.612493 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-ovsdbserver-sb\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.624815 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-dns-svc\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.630725 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe22295-abd7-4094-b93e-3fb24d38242c-config\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.640955 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g9w2\" (UniqueName: \"kubernetes.io/projected/cfe22295-abd7-4094-b93e-3fb24d38242c-kube-api-access-4g9w2\") pod \"dnsmasq-dns-6bcf8b9d95-kpkj4\" (UID: \"cfe22295-abd7-4094-b93e-3fb24d38242c\") " pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.749456 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.803678 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.819581 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn7vk\" (UniqueName: \"kubernetes.io/projected/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-kube-api-access-tn7vk\") pod \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.819698 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-swift-storage-0\") pod \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.819750 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-svc\") pod \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.819817 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-config\") pod \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.819894 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-nb\") pod \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.819916 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-sb\") pod \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\" (UID: \"887a809b-9c1a-4f0f-94e5-a2afcb1f914f\") " Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.830658 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-kube-api-access-tn7vk" (OuterVolumeSpecName: "kube-api-access-tn7vk") pod "887a809b-9c1a-4f0f-94e5-a2afcb1f914f" (UID: "887a809b-9c1a-4f0f-94e5-a2afcb1f914f"). InnerVolumeSpecName "kube-api-access-tn7vk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.919898 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "887a809b-9c1a-4f0f-94e5-a2afcb1f914f" (UID: "887a809b-9c1a-4f0f-94e5-a2afcb1f914f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.920618 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "887a809b-9c1a-4f0f-94e5-a2afcb1f914f" (UID: "887a809b-9c1a-4f0f-94e5-a2afcb1f914f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.921851 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.921868 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn7vk\" (UniqueName: \"kubernetes.io/projected/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-kube-api-access-tn7vk\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.921878 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.933571 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "887a809b-9c1a-4f0f-94e5-a2afcb1f914f" (UID: "887a809b-9c1a-4f0f-94e5-a2afcb1f914f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.934007 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-config" (OuterVolumeSpecName: "config") pod "887a809b-9c1a-4f0f-94e5-a2afcb1f914f" (UID: "887a809b-9c1a-4f0f-94e5-a2afcb1f914f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.941501 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "887a809b-9c1a-4f0f-94e5-a2afcb1f914f" (UID: "887a809b-9c1a-4f0f-94e5-a2afcb1f914f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.950402 4605 generic.go:334] "Generic (PLEG): container finished" podID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerID="c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd" exitCode=0 Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.950443 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" event={"ID":"887a809b-9c1a-4f0f-94e5-a2afcb1f914f","Type":"ContainerDied","Data":"c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd"} Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.950469 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" event={"ID":"887a809b-9c1a-4f0f-94e5-a2afcb1f914f","Type":"ContainerDied","Data":"f28562bf48a3ee0f9a865cd7719a52fcf1600d92fada41e7a4d38c070d8c8b3d"} Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.950487 4605 scope.go:117] "RemoveContainer" containerID="c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.950481 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-nb6z5" Oct 01 14:06:48 crc kubenswrapper[4605]: I1001 14:06:48.997712 4605 scope.go:117] "RemoveContainer" containerID="20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.011522 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-nb6z5"] Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.023013 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-nb6z5"] Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.023959 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.023991 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.024002 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887a809b-9c1a-4f0f-94e5-a2afcb1f914f-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.046630 4605 scope.go:117] "RemoveContainer" containerID="c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd" Oct 01 14:06:49 crc kubenswrapper[4605]: E1001 14:06:49.047028 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd\": container with ID starting with c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd not found: ID does not exist" containerID="c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.047057 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd"} err="failed to get container status \"c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd\": rpc error: code = NotFound desc = could not find container \"c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd\": container with ID starting with c2f92381a490a78b3e7e468e4a7466a1dada9f38ffe8ffd065bc1bb936a99cbd not found: ID does not exist" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.047077 4605 scope.go:117] "RemoveContainer" containerID="20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23" Oct 01 14:06:49 crc kubenswrapper[4605]: E1001 14:06:49.047402 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23\": container with ID starting with 20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23 not found: ID does not exist" containerID="20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.047422 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23"} err="failed to get container status 
\"20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23\": rpc error: code = NotFound desc = could not find container \"20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23\": container with ID starting with 20bf734c72123fa46a78c01860a9373ecf8e085bb6c5bd494542ba9d8a44dd23 not found: ID does not exist" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.341233 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bcf8b9d95-kpkj4"] Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.937677 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" path="/var/lib/kubelet/pods/887a809b-9c1a-4f0f-94e5-a2afcb1f914f/volumes" Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.961319 4605 generic.go:334] "Generic (PLEG): container finished" podID="cfe22295-abd7-4094-b93e-3fb24d38242c" containerID="59bac35d99cb082b1ca9f832a819f9c0a26cb3bba95fc7f4a83a9bf12516fa3e" exitCode=0 Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.961364 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" event={"ID":"cfe22295-abd7-4094-b93e-3fb24d38242c","Type":"ContainerDied","Data":"59bac35d99cb082b1ca9f832a819f9c0a26cb3bba95fc7f4a83a9bf12516fa3e"} Oct 01 14:06:49 crc kubenswrapper[4605]: I1001 14:06:49.961413 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" event={"ID":"cfe22295-abd7-4094-b93e-3fb24d38242c","Type":"ContainerStarted","Data":"fa9f724e07c2fc774eb5a3f17c0f287182a1664b39deb331cc4b0490710fee09"} Oct 01 14:06:50 crc kubenswrapper[4605]: I1001 14:06:50.974008 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" event={"ID":"cfe22295-abd7-4094-b93e-3fb24d38242c","Type":"ContainerStarted","Data":"5d123132a5f52ecb2eb90d8f9503b5ab92a41b88392221ba933a54dd2b75502b"} Oct 01 14:06:50 crc kubenswrapper[4605]: I1001 14:06:50.974349 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:51 crc kubenswrapper[4605]: I1001 14:06:51.003409 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" podStartSLOduration=3.003382344 podStartE2EDuration="3.003382344s" podCreationTimestamp="2025-10-01 14:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:06:50.992906379 +0000 UTC m=+1333.736882597" watchObservedRunningTime="2025-10-01 14:06:51.003382344 +0000 UTC m=+1333.747358552" Oct 01 14:06:51 crc kubenswrapper[4605]: I1001 14:06:51.631475 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:06:51 crc kubenswrapper[4605]: I1001 14:06:51.631721 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:06:58 crc kubenswrapper[4605]: I1001 14:06:58.806580 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/dnsmasq-dns-6bcf8b9d95-kpkj4" Oct 01 14:06:58 crc kubenswrapper[4605]: I1001 14:06:58.868188 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-f565f"] Oct 01 14:06:58 crc kubenswrapper[4605]: I1001 14:06:58.868435 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-f565f" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerName="dnsmasq-dns" containerID="cri-o://395307f169c808f7a413f49e42aaa4d56003ad824edc53809a59d24f8117bbb5" gracePeriod=10 Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.077417 4605 generic.go:334] "Generic (PLEG): container finished" podID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerID="395307f169c808f7a413f49e42aaa4d56003ad824edc53809a59d24f8117bbb5" exitCode=0 Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.077636 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-f565f" event={"ID":"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0","Type":"ContainerDied","Data":"395307f169c808f7a413f49e42aaa4d56003ad824edc53809a59d24f8117bbb5"} Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.414716 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545513 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-sb\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545569 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-svc\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545608 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-config\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545637 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8s86x\" (UniqueName: \"kubernetes.io/projected/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-kube-api-access-8s86x\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545692 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-openstack-edpm-ipam\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545723 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-nb\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.545884 4605 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-swift-storage-0\") pod \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\" (UID: \"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0\") " Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.558558 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-kube-api-access-8s86x" (OuterVolumeSpecName: "kube-api-access-8s86x") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "kube-api-access-8s86x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.599850 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.601952 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.615280 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.618292 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.648761 4605 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.648866 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.648922 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8s86x\" (UniqueName: \"kubernetes.io/projected/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-kube-api-access-8s86x\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.648991 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.649046 4605 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.654036 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.658782 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-config" (OuterVolumeSpecName: "config") pod "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" (UID: "d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.750427 4605 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 01 14:06:59 crc kubenswrapper[4605]: I1001 14:06:59.750462 4605 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:00 crc kubenswrapper[4605]: I1001 14:07:00.086919 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-f565f" event={"ID":"d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0","Type":"ContainerDied","Data":"1c978df3dd1409ac4314459c26a3c7a3c62ec423c82f0866733a21ea5464a3d3"} Oct 01 14:07:00 crc kubenswrapper[4605]: I1001 14:07:00.086959 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-f565f" Oct 01 14:07:00 crc kubenswrapper[4605]: I1001 14:07:00.088234 4605 scope.go:117] "RemoveContainer" containerID="395307f169c808f7a413f49e42aaa4d56003ad824edc53809a59d24f8117bbb5" Oct 01 14:07:00 crc kubenswrapper[4605]: I1001 14:07:00.111506 4605 scope.go:117] "RemoveContainer" containerID="a819a0e5250c8f54476e649f588739381cfad5265f15930bdabc91a435a44402" Oct 01 14:07:00 crc kubenswrapper[4605]: I1001 14:07:00.113016 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-f565f"] Oct 01 14:07:00 crc kubenswrapper[4605]: I1001 14:07:00.122736 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-f565f"] Oct 01 14:07:01 crc kubenswrapper[4605]: I1001 14:07:01.940064 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" path="/var/lib/kubelet/pods/d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0/volumes" Oct 01 14:07:09 crc kubenswrapper[4605]: I1001 14:07:09.173213 4605 generic.go:334] "Generic (PLEG): container finished" podID="1b035ab1-17f0-4d9e-91d4-983b4cd06469" containerID="23bf175371ff4e9f92937c13eb4b4d93c8bc210c7c5a9b0deff671efa4f37431" exitCode=0 Oct 01 14:07:09 crc kubenswrapper[4605]: I1001 14:07:09.173263 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b035ab1-17f0-4d9e-91d4-983b4cd06469","Type":"ContainerDied","Data":"23bf175371ff4e9f92937c13eb4b4d93c8bc210c7c5a9b0deff671efa4f37431"} Oct 01 14:07:10 crc kubenswrapper[4605]: I1001 14:07:10.184005 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1b035ab1-17f0-4d9e-91d4-983b4cd06469","Type":"ContainerStarted","Data":"99d1147ba0b023c53a6b7ff17012509b9c2d90e4d8bbd75565cf567ddd5c1726"} Oct 01 14:07:10 crc kubenswrapper[4605]: I1001 14:07:10.185817 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 01 14:07:10 crc kubenswrapper[4605]: I1001 14:07:10.208032 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.208010341 podStartE2EDuration="36.208010341s" podCreationTimestamp="2025-10-01 14:06:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:07:10.207827686 +0000 UTC m=+1352.951803904" watchObservedRunningTime="2025-10-01 14:07:10.208010341 +0000 UTC m=+1352.951986549" Oct 01 14:07:11 crc kubenswrapper[4605]: I1001 14:07:11.200402 4605 generic.go:334] "Generic (PLEG): container finished" podID="9d1c480b-5fd5-4134-913c-19381d8f4db4" containerID="3e3ab4c356f64f37ce047535a62dced8d0256344c8fd1d926d7e2cb026406c3f" exitCode=0 Oct 01 14:07:11 crc kubenswrapper[4605]: I1001 14:07:11.200475 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9d1c480b-5fd5-4134-913c-19381d8f4db4","Type":"ContainerDied","Data":"3e3ab4c356f64f37ce047535a62dced8d0256344c8fd1d926d7e2cb026406c3f"} Oct 01 14:07:12 crc kubenswrapper[4605]: I1001 14:07:12.211341 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9d1c480b-5fd5-4134-913c-19381d8f4db4","Type":"ContainerStarted","Data":"22b2859eb10e2f6c3a7e809bb223d29cd2c2b2bab779fe72fcc3f39d77624a22"} Oct 01 14:07:12 crc kubenswrapper[4605]: I1001 14:07:12.211886 4605 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:07:12 crc kubenswrapper[4605]: I1001 14:07:12.246797 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.246775334 podStartE2EDuration="37.246775334s" podCreationTimestamp="2025-10-01 14:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:07:12.239141881 +0000 UTC m=+1354.983118089" watchObservedRunningTime="2025-10-01 14:07:12.246775334 +0000 UTC m=+1354.990751542" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.170744 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2"] Oct 01 14:07:17 crc kubenswrapper[4605]: E1001 14:07:17.171815 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerName="init" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.171832 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerName="init" Oct 01 14:07:17 crc kubenswrapper[4605]: E1001 14:07:17.171853 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerName="dnsmasq-dns" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.171862 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerName="dnsmasq-dns" Oct 01 14:07:17 crc kubenswrapper[4605]: E1001 14:07:17.171880 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerName="dnsmasq-dns" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.171888 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerName="dnsmasq-dns" Oct 01 14:07:17 crc kubenswrapper[4605]: E1001 14:07:17.171906 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerName="init" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.171913 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerName="init" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.172157 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="887a809b-9c1a-4f0f-94e5-a2afcb1f914f" containerName="dnsmasq-dns" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.172183 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="d46d0a8e-2cd9-4f9e-8406-5a5e3cbf24e0" containerName="dnsmasq-dns" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.172920 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.181139 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.181152 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.181882 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.193048 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.203390 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2"] Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.284855 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.284940 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.285022 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.285051 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf895\" (UniqueName: \"kubernetes.io/projected/9cd5ee34-51d2-4a40-9312-e83bf07927b7-kube-api-access-vf895\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.386854 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf895\" (UniqueName: \"kubernetes.io/projected/9cd5ee34-51d2-4a40-9312-e83bf07927b7-kube-api-access-vf895\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.386975 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.387034 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.387159 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.393843 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.393894 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.409743 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.410193 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf895\" (UniqueName: \"kubernetes.io/projected/9cd5ee34-51d2-4a40-9312-e83bf07927b7-kube-api-access-vf895\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:17 crc kubenswrapper[4605]: I1001 14:07:17.505158 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:18 crc kubenswrapper[4605]: I1001 14:07:18.122306 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2"] Oct 01 14:07:18 crc kubenswrapper[4605]: I1001 14:07:18.259312 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" event={"ID":"9cd5ee34-51d2-4a40-9312-e83bf07927b7","Type":"ContainerStarted","Data":"08ee9f1d7daa2e5ac41cd0e68ac68c04dfa9bb6d3eabe0ef065d8dbb061cc019"} Oct 01 14:07:21 crc kubenswrapper[4605]: I1001 14:07:21.630773 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:07:21 crc kubenswrapper[4605]: I1001 14:07:21.631177 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:07:25 crc kubenswrapper[4605]: I1001 14:07:25.195319 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 01 14:07:26 crc kubenswrapper[4605]: I1001 14:07:26.220306 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 01 14:07:28 crc kubenswrapper[4605]: I1001 14:07:28.387794 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" event={"ID":"9cd5ee34-51d2-4a40-9312-e83bf07927b7","Type":"ContainerStarted","Data":"4ecedb32287a7a968e9b90cf5b4891a532ebdf19c15156928984968e34e90d09"} Oct 01 14:07:28 crc kubenswrapper[4605]: I1001 14:07:28.418715 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" podStartSLOduration=1.894538443 podStartE2EDuration="11.418696807s" podCreationTimestamp="2025-10-01 14:07:17 +0000 UTC" firstStartedPulling="2025-10-01 14:07:18.138877245 +0000 UTC m=+1360.882853453" lastFinishedPulling="2025-10-01 14:07:27.663035609 +0000 UTC m=+1370.407011817" observedRunningTime="2025-10-01 14:07:28.412433189 +0000 UTC m=+1371.156409397" watchObservedRunningTime="2025-10-01 14:07:28.418696807 +0000 UTC m=+1371.162673015" Oct 01 14:07:40 crc kubenswrapper[4605]: I1001 14:07:40.521872 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" event={"ID":"9cd5ee34-51d2-4a40-9312-e83bf07927b7","Type":"ContainerDied","Data":"4ecedb32287a7a968e9b90cf5b4891a532ebdf19c15156928984968e34e90d09"} Oct 01 14:07:40 crc kubenswrapper[4605]: I1001 14:07:40.521365 4605 generic.go:334] "Generic (PLEG): container finished" podID="9cd5ee34-51d2-4a40-9312-e83bf07927b7" containerID="4ecedb32287a7a968e9b90cf5b4891a532ebdf19c15156928984968e34e90d09" exitCode=0 Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.016243 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.051620 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-repo-setup-combined-ca-bundle\") pod \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.051734 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf895\" (UniqueName: \"kubernetes.io/projected/9cd5ee34-51d2-4a40-9312-e83bf07927b7-kube-api-access-vf895\") pod \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.051812 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-inventory\") pod \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.051940 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-ssh-key\") pod \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\" (UID: \"9cd5ee34-51d2-4a40-9312-e83bf07927b7\") " Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.058540 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "9cd5ee34-51d2-4a40-9312-e83bf07927b7" (UID: "9cd5ee34-51d2-4a40-9312-e83bf07927b7"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.058906 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cd5ee34-51d2-4a40-9312-e83bf07927b7-kube-api-access-vf895" (OuterVolumeSpecName: "kube-api-access-vf895") pod "9cd5ee34-51d2-4a40-9312-e83bf07927b7" (UID: "9cd5ee34-51d2-4a40-9312-e83bf07927b7"). InnerVolumeSpecName "kube-api-access-vf895". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.082181 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9cd5ee34-51d2-4a40-9312-e83bf07927b7" (UID: "9cd5ee34-51d2-4a40-9312-e83bf07927b7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.087804 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-inventory" (OuterVolumeSpecName: "inventory") pod "9cd5ee34-51d2-4a40-9312-e83bf07927b7" (UID: "9cd5ee34-51d2-4a40-9312-e83bf07927b7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.154068 4605 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.154135 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf895\" (UniqueName: \"kubernetes.io/projected/9cd5ee34-51d2-4a40-9312-e83bf07927b7-kube-api-access-vf895\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.154151 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.154179 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cd5ee34-51d2-4a40-9312-e83bf07927b7-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.542788 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" event={"ID":"9cd5ee34-51d2-4a40-9312-e83bf07927b7","Type":"ContainerDied","Data":"08ee9f1d7daa2e5ac41cd0e68ac68c04dfa9bb6d3eabe0ef065d8dbb061cc019"} Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.542951 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08ee9f1d7daa2e5ac41cd0e68ac68c04dfa9bb6d3eabe0ef065d8dbb061cc019" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.543006 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.637500 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f"] Oct 01 14:07:42 crc kubenswrapper[4605]: E1001 14:07:42.637930 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd5ee34-51d2-4a40-9312-e83bf07927b7" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.637949 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd5ee34-51d2-4a40-9312-e83bf07927b7" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.638138 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd5ee34-51d2-4a40-9312-e83bf07927b7" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.638731 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.641961 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.642342 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.642553 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.642784 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.659406 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f"] Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.665818 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.665892 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4blm\" (UniqueName: \"kubernetes.io/projected/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-kube-api-access-b4blm\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.665942 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.767358 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.767449 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4blm\" (UniqueName: \"kubernetes.io/projected/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-kube-api-access-b4blm\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.767539 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.775948 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.777703 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.784857 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4blm\" (UniqueName: \"kubernetes.io/projected/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-kube-api-access-b4blm\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tps9f\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:42 crc kubenswrapper[4605]: I1001 14:07:42.966582 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:43 crc kubenswrapper[4605]: I1001 14:07:43.564362 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f"] Oct 01 14:07:44 crc kubenswrapper[4605]: I1001 14:07:44.562314 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" event={"ID":"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86","Type":"ContainerStarted","Data":"f03a3f6d54e7489d2980a4c236be743807ca5bcdc0dd931069133914b84cc317"} Oct 01 14:07:44 crc kubenswrapper[4605]: I1001 14:07:44.562623 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" event={"ID":"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86","Type":"ContainerStarted","Data":"683e3462a5ab3b5cc174e7dd701b93e35e3071993e8adfe7c7e3f60b84dad6c2"} Oct 01 14:07:44 crc kubenswrapper[4605]: I1001 14:07:44.594404 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" podStartSLOduration=2.136548209 podStartE2EDuration="2.594383025s" podCreationTimestamp="2025-10-01 14:07:42 +0000 UTC" firstStartedPulling="2025-10-01 14:07:43.566219405 +0000 UTC m=+1386.310195633" lastFinishedPulling="2025-10-01 14:07:44.024054241 +0000 UTC m=+1386.768030449" observedRunningTime="2025-10-01 14:07:44.591902512 +0000 UTC m=+1387.335878750" watchObservedRunningTime="2025-10-01 14:07:44.594383025 +0000 UTC m=+1387.338359233" Oct 01 14:07:47 crc kubenswrapper[4605]: I1001 14:07:47.595076 4605 generic.go:334] "Generic (PLEG): container finished" podID="6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" containerID="f03a3f6d54e7489d2980a4c236be743807ca5bcdc0dd931069133914b84cc317" exitCode=0 Oct 01 14:07:47 crc kubenswrapper[4605]: I1001 14:07:47.595150 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" 
event={"ID":"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86","Type":"ContainerDied","Data":"f03a3f6d54e7489d2980a4c236be743807ca5bcdc0dd931069133914b84cc317"} Oct 01 14:07:48 crc kubenswrapper[4605]: I1001 14:07:48.697789 4605 scope.go:117] "RemoveContainer" containerID="78d7b90a8edcea3607571e8dba0ff74ff511761fe7286bbaefd33115b3a5766b" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.061612 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.095725 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-ssh-key\") pod \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.096147 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4blm\" (UniqueName: \"kubernetes.io/projected/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-kube-api-access-b4blm\") pod \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.096266 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-inventory\") pod \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\" (UID: \"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86\") " Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.118844 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-kube-api-access-b4blm" (OuterVolumeSpecName: "kube-api-access-b4blm") pod "6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" (UID: "6e328f7a-3f9b-48c7-b277-cf0f99b9bf86"). InnerVolumeSpecName "kube-api-access-b4blm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.133400 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" (UID: "6e328f7a-3f9b-48c7-b277-cf0f99b9bf86"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.134499 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-inventory" (OuterVolumeSpecName: "inventory") pod "6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" (UID: "6e328f7a-3f9b-48c7-b277-cf0f99b9bf86"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.199339 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4blm\" (UniqueName: \"kubernetes.io/projected/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-kube-api-access-b4blm\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.199370 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.199380 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e328f7a-3f9b-48c7-b277-cf0f99b9bf86-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.616247 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" event={"ID":"6e328f7a-3f9b-48c7-b277-cf0f99b9bf86","Type":"ContainerDied","Data":"683e3462a5ab3b5cc174e7dd701b93e35e3071993e8adfe7c7e3f60b84dad6c2"} Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.616470 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="683e3462a5ab3b5cc174e7dd701b93e35e3071993e8adfe7c7e3f60b84dad6c2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.616684 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tps9f" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.702357 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2"] Oct 01 14:07:49 crc kubenswrapper[4605]: E1001 14:07:49.703197 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.703216 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.703464 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e328f7a-3f9b-48c7-b277-cf0f99b9bf86" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.704302 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.706390 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.706789 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.707058 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.720836 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.721719 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2"] Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.814548 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.814645 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.814665 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmr8k\" (UniqueName: \"kubernetes.io/projected/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-kube-api-access-lmr8k\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.814747 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.916693 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.917154 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmr8k\" (UniqueName: \"kubernetes.io/projected/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-kube-api-access-lmr8k\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.917314 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.917401 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.921442 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.921635 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.923221 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:49 crc kubenswrapper[4605]: I1001 14:07:49.940054 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmr8k\" (UniqueName: \"kubernetes.io/projected/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-kube-api-access-lmr8k\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:50 crc kubenswrapper[4605]: I1001 14:07:50.029800 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:07:50 crc kubenswrapper[4605]: I1001 14:07:50.594524 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2"] Oct 01 14:07:50 crc kubenswrapper[4605]: I1001 14:07:50.628388 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" event={"ID":"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6","Type":"ContainerStarted","Data":"ceb69b5117ad330c0e680e3f687ccc5dfdebb42b32ae731611953342e634a2fd"} Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.630740 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.631294 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.631343 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.632102 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31f8ad2782362fc773c73e454aeec697a35f4e0956b1bf4d85878b45beec465b"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.632156 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://31f8ad2782362fc773c73e454aeec697a35f4e0956b1bf4d85878b45beec465b" gracePeriod=600 Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.638608 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" event={"ID":"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6","Type":"ContainerStarted","Data":"c5b8cdab6513bc67aaa29445b0db3da140b1ffb3111078a17bba85a061209ddd"} Oct 01 14:07:51 crc kubenswrapper[4605]: I1001 14:07:51.670221 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" podStartSLOduration=2.123887926 podStartE2EDuration="2.670196972s" podCreationTimestamp="2025-10-01 14:07:49 +0000 UTC" firstStartedPulling="2025-10-01 14:07:50.60130509 +0000 UTC m=+1393.345281298" lastFinishedPulling="2025-10-01 14:07:51.147614136 +0000 UTC m=+1393.891590344" observedRunningTime="2025-10-01 14:07:51.665599815 +0000 UTC m=+1394.409576023" watchObservedRunningTime="2025-10-01 14:07:51.670196972 +0000 UTC m=+1394.414173180" Oct 01 14:07:52 crc kubenswrapper[4605]: I1001 14:07:52.650389 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" 
containerID="31f8ad2782362fc773c73e454aeec697a35f4e0956b1bf4d85878b45beec465b" exitCode=0 Oct 01 14:07:52 crc kubenswrapper[4605]: I1001 14:07:52.650482 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"31f8ad2782362fc773c73e454aeec697a35f4e0956b1bf4d85878b45beec465b"} Oct 01 14:07:52 crc kubenswrapper[4605]: I1001 14:07:52.650756 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196"} Oct 01 14:07:52 crc kubenswrapper[4605]: I1001 14:07:52.650779 4605 scope.go:117] "RemoveContainer" containerID="26228f282f385d65bcc8a30f3ba1b4954e3d59ec9adad591dd318d09c86924ce" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.136851 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zqpz6"] Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.139070 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.150228 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqpz6"] Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.194603 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h45n\" (UniqueName: \"kubernetes.io/projected/0cfcc390-49fd-4592-b68f-b37575bc12d0-kube-api-access-4h45n\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.194686 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-catalog-content\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.194737 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-utilities\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.296256 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h45n\" (UniqueName: \"kubernetes.io/projected/0cfcc390-49fd-4592-b68f-b37575bc12d0-kube-api-access-4h45n\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.296352 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-catalog-content\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc 
kubenswrapper[4605]: I1001 14:08:15.296448 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-utilities\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.296974 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-utilities\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.297762 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-catalog-content\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.315620 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h45n\" (UniqueName: \"kubernetes.io/projected/0cfcc390-49fd-4592-b68f-b37575bc12d0-kube-api-access-4h45n\") pod \"redhat-marketplace-zqpz6\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.457911 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:15 crc kubenswrapper[4605]: I1001 14:08:15.980791 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqpz6"] Oct 01 14:08:16 crc kubenswrapper[4605]: I1001 14:08:16.891146 4605 generic.go:334] "Generic (PLEG): container finished" podID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerID="4f9d90ab1c51d6da2586ba72d6f8a1c58af1e63cdee423d33d4dd07dd0d8fa8d" exitCode=0 Oct 01 14:08:16 crc kubenswrapper[4605]: I1001 14:08:16.891729 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerDied","Data":"4f9d90ab1c51d6da2586ba72d6f8a1c58af1e63cdee423d33d4dd07dd0d8fa8d"} Oct 01 14:08:16 crc kubenswrapper[4605]: I1001 14:08:16.891763 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerStarted","Data":"4201a66fd74479fe53a050c2110aaeb97d50f5cf3a00acb45b6f52733bbee98e"} Oct 01 14:08:17 crc kubenswrapper[4605]: I1001 14:08:17.902155 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerStarted","Data":"c615f8a3931a81cb32ff8c90d8d8bcb30c8a456acc7c85168bdb35897bce5d6a"} Oct 01 14:08:18 crc kubenswrapper[4605]: I1001 14:08:18.912611 4605 generic.go:334] "Generic (PLEG): container finished" podID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerID="c615f8a3931a81cb32ff8c90d8d8bcb30c8a456acc7c85168bdb35897bce5d6a" exitCode=0 Oct 01 14:08:18 crc kubenswrapper[4605]: I1001 14:08:18.912652 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" 
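
redhat-marketplace-zqpz6 runs two init containers in sequence, each reported as ContainerDied with exitCode=0 before the next starts, and only then starts the registry-server main container; catalog-content and utilities are emptyDir scratch volumes shared between them. A sketch of that pod shape with k8s.io/api types; the image and mount paths are assumptions:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	spec := corev1.PodSpec{
		// Init containers run strictly in order; each must exit 0
		// (the ContainerDied exitCode=0 events) before the next starts.
		InitContainers: []corev1.Container{
			{Name: "extract-utilities", Image: "example.io/catalog:latest", // image assumed
				VolumeMounts: []corev1.VolumeMount{{Name: "utilities", MountPath: "/utilities"}}}, // path assumed
			{Name: "extract-content", Image: "example.io/catalog:latest",
				VolumeMounts: []corev1.VolumeMount{{Name: "catalog-content", MountPath: "/extracted-catalog"}}},
		},
		Containers: []corev1.Container{{Name: "registry-server", Image: "example.io/catalog:latest"}},
		Volumes: []corev1.Volume{
			{Name: "utilities", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
			{Name: "catalog-content", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		},
	}
	fmt.Println(len(spec.InitContainers), "init containers run before", spec.Containers[0].Name)
}
```
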
event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerDied","Data":"c615f8a3931a81cb32ff8c90d8d8bcb30c8a456acc7c85168bdb35897bce5d6a"} Oct 01 14:08:19 crc kubenswrapper[4605]: I1001 14:08:19.922538 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerStarted","Data":"5d22e681ac8d3a296ac192d666552ca1ef834ebf5ac9f6e011bb833fb7296c18"} Oct 01 14:08:19 crc kubenswrapper[4605]: I1001 14:08:19.956862 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zqpz6" podStartSLOduration=2.485392553 podStartE2EDuration="4.956834986s" podCreationTimestamp="2025-10-01 14:08:15 +0000 UTC" firstStartedPulling="2025-10-01 14:08:16.897669998 +0000 UTC m=+1419.641646206" lastFinishedPulling="2025-10-01 14:08:19.369112431 +0000 UTC m=+1422.113088639" observedRunningTime="2025-10-01 14:08:19.952810454 +0000 UTC m=+1422.696786662" watchObservedRunningTime="2025-10-01 14:08:19.956834986 +0000 UTC m=+1422.700811194" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.524597 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s8vv8"] Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.527725 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.537363 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s8vv8"] Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.648267 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-utilities\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.648414 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-catalog-content\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.648513 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-856f8\" (UniqueName: \"kubernetes.io/projected/c87e4571-b973-4400-835f-1affd142c33d-kube-api-access-856f8\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.750279 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-856f8\" (UniqueName: \"kubernetes.io/projected/c87e4571-b973-4400-835f-1affd142c33d-kube-api-access-856f8\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.750430 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-utilities\") pod \"redhat-operators-s8vv8\" (UID: 
\"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.750529 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-catalog-content\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.751198 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-catalog-content\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.751274 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-utilities\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.772681 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-856f8\" (UniqueName: \"kubernetes.io/projected/c87e4571-b973-4400-835f-1affd142c33d-kube-api-access-856f8\") pod \"redhat-operators-s8vv8\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:22 crc kubenswrapper[4605]: I1001 14:08:22.845965 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:23 crc kubenswrapper[4605]: I1001 14:08:23.370325 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s8vv8"] Oct 01 14:08:23 crc kubenswrapper[4605]: I1001 14:08:23.973262 4605 generic.go:334] "Generic (PLEG): container finished" podID="c87e4571-b973-4400-835f-1affd142c33d" containerID="d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d" exitCode=0 Oct 01 14:08:23 crc kubenswrapper[4605]: I1001 14:08:23.973432 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerDied","Data":"d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d"} Oct 01 14:08:23 crc kubenswrapper[4605]: I1001 14:08:23.973596 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerStarted","Data":"d9d2b16e0ae6c46c15bf40a2b194de5e91a667979bc55578aaf8d6db80ec4177"} Oct 01 14:08:24 crc kubenswrapper[4605]: I1001 14:08:24.985495 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerStarted","Data":"22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8"} Oct 01 14:08:25 crc kubenswrapper[4605]: I1001 14:08:25.460039 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:25 crc kubenswrapper[4605]: I1001 14:08:25.460167 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:25 crc kubenswrapper[4605]: I1001 14:08:25.516812 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:26 crc kubenswrapper[4605]: I1001 14:08:26.061951 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:27 crc kubenswrapper[4605]: I1001 14:08:27.706685 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqpz6"] Oct 01 14:08:28 crc kubenswrapper[4605]: I1001 14:08:28.018368 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zqpz6" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="registry-server" containerID="cri-o://5d22e681ac8d3a296ac192d666552ca1ef834ebf5ac9f6e011bb833fb7296c18" gracePeriod=2 Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.028123 4605 generic.go:334] "Generic (PLEG): container finished" podID="c87e4571-b973-4400-835f-1affd142c33d" containerID="22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8" exitCode=0 Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.029243 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerDied","Data":"22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8"} Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.033910 4605 generic.go:334] "Generic (PLEG): container finished" podID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerID="5d22e681ac8d3a296ac192d666552ca1ef834ebf5ac9f6e011bb833fb7296c18" exitCode=0 Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.033962 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerDied","Data":"5d22e681ac8d3a296ac192d666552ca1ef834ebf5ac9f6e011bb833fb7296c18"} Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.369250 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.489118 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-catalog-content\") pod \"0cfcc390-49fd-4592-b68f-b37575bc12d0\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.489287 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h45n\" (UniqueName: \"kubernetes.io/projected/0cfcc390-49fd-4592-b68f-b37575bc12d0-kube-api-access-4h45n\") pod \"0cfcc390-49fd-4592-b68f-b37575bc12d0\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.489367 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-utilities\") pod \"0cfcc390-49fd-4592-b68f-b37575bc12d0\" (UID: \"0cfcc390-49fd-4592-b68f-b37575bc12d0\") " Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.490129 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-utilities" (OuterVolumeSpecName: "utilities") pod "0cfcc390-49fd-4592-b68f-b37575bc12d0" (UID: "0cfcc390-49fd-4592-b68f-b37575bc12d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.497774 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cfcc390-49fd-4592-b68f-b37575bc12d0-kube-api-access-4h45n" (OuterVolumeSpecName: "kube-api-access-4h45n") pod "0cfcc390-49fd-4592-b68f-b37575bc12d0" (UID: "0cfcc390-49fd-4592-b68f-b37575bc12d0"). InnerVolumeSpecName "kube-api-access-4h45n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.502902 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cfcc390-49fd-4592-b68f-b37575bc12d0" (UID: "0cfcc390-49fd-4592-b68f-b37575bc12d0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.591073 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.591132 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cfcc390-49fd-4592-b68f-b37575bc12d0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:08:29 crc kubenswrapper[4605]: I1001 14:08:29.591146 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h45n\" (UniqueName: \"kubernetes.io/projected/0cfcc390-49fd-4592-b68f-b37575bc12d0-kube-api-access-4h45n\") on node \"crc\" DevicePath \"\"" Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.044679 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerStarted","Data":"2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085"} Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.047115 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zqpz6" event={"ID":"0cfcc390-49fd-4592-b68f-b37575bc12d0","Type":"ContainerDied","Data":"4201a66fd74479fe53a050c2110aaeb97d50f5cf3a00acb45b6f52733bbee98e"} Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.047151 4605 scope.go:117] "RemoveContainer" containerID="5d22e681ac8d3a296ac192d666552ca1ef834ebf5ac9f6e011bb833fb7296c18" Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.047274 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zqpz6" Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.069968 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s8vv8" podStartSLOduration=2.219812772 podStartE2EDuration="8.069944906s" podCreationTimestamp="2025-10-01 14:08:22 +0000 UTC" firstStartedPulling="2025-10-01 14:08:23.975268719 +0000 UTC m=+1426.719244917" lastFinishedPulling="2025-10-01 14:08:29.825400853 +0000 UTC m=+1432.569377051" observedRunningTime="2025-10-01 14:08:30.066499259 +0000 UTC m=+1432.810475467" watchObservedRunningTime="2025-10-01 14:08:30.069944906 +0000 UTC m=+1432.813921114" Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.072960 4605 scope.go:117] "RemoveContainer" containerID="c615f8a3931a81cb32ff8c90d8d8bcb30c8a456acc7c85168bdb35897bce5d6a" Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.087611 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqpz6"] Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.111186 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zqpz6"] Oct 01 14:08:30 crc kubenswrapper[4605]: I1001 14:08:30.112518 4605 scope.go:117] "RemoveContainer" containerID="4f9d90ab1c51d6da2586ba72d6f8a1c58af1e63cdee423d33d4dd07dd0d8fa8d" Oct 01 14:08:31 crc kubenswrapper[4605]: I1001 14:08:31.937710 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" path="/var/lib/kubelet/pods/0cfcc390-49fd-4592-b68f-b37575bc12d0/volumes" Oct 01 14:08:32 crc kubenswrapper[4605]: I1001 14:08:32.846937 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:32 crc kubenswrapper[4605]: I1001 14:08:32.848405 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:33 crc kubenswrapper[4605]: I1001 14:08:33.905303 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-s8vv8" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="registry-server" probeResult="failure" output=< Oct 01 14:08:33 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:08:33 crc kubenswrapper[4605]: > Oct 01 14:08:42 crc kubenswrapper[4605]: I1001 14:08:42.893303 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:42 crc kubenswrapper[4605]: I1001 14:08:42.949694 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:43 crc kubenswrapper[4605]: I1001 14:08:43.136213 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s8vv8"] Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.182776 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s8vv8" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="registry-server" containerID="cri-o://2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085" gracePeriod=2 Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.633489 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.787788 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-utilities\") pod \"c87e4571-b973-4400-835f-1affd142c33d\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.787966 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-catalog-content\") pod \"c87e4571-b973-4400-835f-1affd142c33d\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.788113 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-856f8\" (UniqueName: \"kubernetes.io/projected/c87e4571-b973-4400-835f-1affd142c33d-kube-api-access-856f8\") pod \"c87e4571-b973-4400-835f-1affd142c33d\" (UID: \"c87e4571-b973-4400-835f-1affd142c33d\") " Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.788736 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-utilities" (OuterVolumeSpecName: "utilities") pod "c87e4571-b973-4400-835f-1affd142c33d" (UID: "c87e4571-b973-4400-835f-1affd142c33d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.789246 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.794754 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c87e4571-b973-4400-835f-1affd142c33d-kube-api-access-856f8" (OuterVolumeSpecName: "kube-api-access-856f8") pod "c87e4571-b973-4400-835f-1affd142c33d" (UID: "c87e4571-b973-4400-835f-1affd142c33d"). InnerVolumeSpecName "kube-api-access-856f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.878309 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c87e4571-b973-4400-835f-1affd142c33d" (UID: "c87e4571-b973-4400-835f-1affd142c33d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.891160 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-856f8\" (UniqueName: \"kubernetes.io/projected/c87e4571-b973-4400-835f-1affd142c33d-kube-api-access-856f8\") on node \"crc\" DevicePath \"\"" Oct 01 14:08:44 crc kubenswrapper[4605]: I1001 14:08:44.891194 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c87e4571-b973-4400-835f-1affd142c33d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.196613 4605 generic.go:334] "Generic (PLEG): container finished" podID="c87e4571-b973-4400-835f-1affd142c33d" containerID="2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085" exitCode=0 Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.196683 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s8vv8" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.196682 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerDied","Data":"2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085"} Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.196903 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s8vv8" event={"ID":"c87e4571-b973-4400-835f-1affd142c33d","Type":"ContainerDied","Data":"d9d2b16e0ae6c46c15bf40a2b194de5e91a667979bc55578aaf8d6db80ec4177"} Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.196990 4605 scope.go:117] "RemoveContainer" containerID="2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.234933 4605 scope.go:117] "RemoveContainer" containerID="22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.252328 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s8vv8"] Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.257817 4605 scope.go:117] "RemoveContainer" containerID="d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.264359 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s8vv8"] Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.322562 4605 scope.go:117] "RemoveContainer" containerID="2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085" Oct 01 14:08:45 crc kubenswrapper[4605]: E1001 14:08:45.323224 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085\": container with ID starting with 2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085 not found: ID does not exist" containerID="2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.323267 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085"} err="failed to get container status \"2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085\": 
rpc error: code = NotFound desc = could not find container \"2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085\": container with ID starting with 2a2043e3f97180b1d8f1155502240fcd847e14f3fe722840e20c5e5887f87085 not found: ID does not exist" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.323292 4605 scope.go:117] "RemoveContainer" containerID="22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8" Oct 01 14:08:45 crc kubenswrapper[4605]: E1001 14:08:45.323654 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8\": container with ID starting with 22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8 not found: ID does not exist" containerID="22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.323696 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8"} err="failed to get container status \"22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8\": rpc error: code = NotFound desc = could not find container \"22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8\": container with ID starting with 22999a4ce75ed1f84bf39ce107049296aa2e8d0d41bda4a803953351c6fa86b8 not found: ID does not exist" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.323727 4605 scope.go:117] "RemoveContainer" containerID="d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d" Oct 01 14:08:45 crc kubenswrapper[4605]: E1001 14:08:45.324020 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d\": container with ID starting with d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d not found: ID does not exist" containerID="d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.324061 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d"} err="failed to get container status \"d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d\": rpc error: code = NotFound desc = could not find container \"d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d\": container with ID starting with d5729e3fbf9fa34d6e91c2e5b99bc346003c867aaf5cb593bc7bcf7414b8802d not found: ID does not exist" Oct 01 14:08:45 crc kubenswrapper[4605]: I1001 14:08:45.960177 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c87e4571-b973-4400-835f-1affd142c33d" path="/var/lib/kubelet/pods/c87e4571-b973-4400-835f-1affd142c33d/volumes" Oct 01 14:08:48 crc kubenswrapper[4605]: I1001 14:08:48.819889 4605 scope.go:117] "RemoveContainer" containerID="57cf237c4829da4dbf12faad24a5e9bf332462eb8be5ae8c2d20f53919726afa" Oct 01 14:10:21 crc kubenswrapper[4605]: I1001 14:10:21.631195 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:10:21 crc kubenswrapper[4605]: 
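
The "ContainerStatus from runtime service failed ... NotFound" errors above are benign: container removal is retried per container ID, and once the runtime has already deleted a container, the follow-up status lookup fails with gRPC NotFound, which cleanup can treat as "already removed". A sketch of that idempotent handling:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer tolerates NotFound so retried cleanup is idempotent:
// a container the runtime has already deleted counts as removed.
func removeContainer(id string, runtimeRemove func(string) error) error {
	if err := runtimeRemove(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone; treating as removed\n", id)
			return nil
		}
		return err
	}
	return nil
}

func main() {
	gone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeContainer("some-container-id", gone)) // <nil>
}
```
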
I1001 14:10:21.631637 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:10:51 crc kubenswrapper[4605]: I1001 14:10:51.631507 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:10:51 crc kubenswrapper[4605]: I1001 14:10:51.632215 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:11:13 crc kubenswrapper[4605]: I1001 14:11:13.045552 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-d6qqt"] Oct 01 14:11:13 crc kubenswrapper[4605]: I1001 14:11:13.053002 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-d6qqt"] Oct 01 14:11:13 crc kubenswrapper[4605]: I1001 14:11:13.943056 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0effdbf0-5be0-4076-b5a6-2b941e5d16e3" path="/var/lib/kubelet/pods/0effdbf0-5be0-4076-b5a6-2b941e5d16e3/volumes" Oct 01 14:11:14 crc kubenswrapper[4605]: I1001 14:11:14.054550 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-kfnp9"] Oct 01 14:11:14 crc kubenswrapper[4605]: I1001 14:11:14.070041 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-kfnp9"] Oct 01 14:11:14 crc kubenswrapper[4605]: I1001 14:11:14.079536 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-tfbpz"] Oct 01 14:11:14 crc kubenswrapper[4605]: I1001 14:11:14.088694 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-tfbpz"] Oct 01 14:11:15 crc kubenswrapper[4605]: I1001 14:11:15.659935 4605 generic.go:334] "Generic (PLEG): container finished" podID="b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" containerID="c5b8cdab6513bc67aaa29445b0db3da140b1ffb3111078a17bba85a061209ddd" exitCode=0 Oct 01 14:11:15 crc kubenswrapper[4605]: I1001 14:11:15.660654 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" event={"ID":"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6","Type":"ContainerDied","Data":"c5b8cdab6513bc67aaa29445b0db3da140b1ffb3111078a17bba85a061209ddd"} Oct 01 14:11:15 crc kubenswrapper[4605]: I1001 14:11:15.940471 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="202abc2b-e141-498c-9198-489fbc0e5130" path="/var/lib/kubelet/pods/202abc2b-e141-498c-9198-489fbc0e5130/volumes" Oct 01 14:11:15 crc kubenswrapper[4605]: I1001 14:11:15.942180 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77127160-38c8-47a8-920b-90a7b2cd8e3f" path="/var/lib/kubelet/pods/77127160-38c8-47a8-920b-90a7b2cd8e3f/volumes" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.078034 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.171227 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-inventory\") pod \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.171411 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-bootstrap-combined-ca-bundle\") pod \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.171534 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-ssh-key\") pod \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.171596 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmr8k\" (UniqueName: \"kubernetes.io/projected/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-kube-api-access-lmr8k\") pod \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\" (UID: \"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6\") " Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.176883 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" (UID: "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.182405 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-kube-api-access-lmr8k" (OuterVolumeSpecName: "kube-api-access-lmr8k") pod "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" (UID: "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6"). InnerVolumeSpecName "kube-api-access-lmr8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.205368 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-inventory" (OuterVolumeSpecName: "inventory") pod "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" (UID: "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.222999 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" (UID: "b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.274251 4605 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.274496 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.274618 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmr8k\" (UniqueName: \"kubernetes.io/projected/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-kube-api-access-lmr8k\") on node \"crc\" DevicePath \"\"" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.274682 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.678582 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" event={"ID":"b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6","Type":"ContainerDied","Data":"ceb69b5117ad330c0e680e3f687ccc5dfdebb42b32ae731611953342e634a2fd"} Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.678636 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.678641 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ceb69b5117ad330c0e680e3f687ccc5dfdebb42b32ae731611953342e634a2fd" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796024 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v"] Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796697 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="extract-utilities" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796713 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="extract-utilities" Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796725 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="extract-utilities" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796733 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="extract-utilities" Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796744 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="registry-server" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796750 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="registry-server" Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796769 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="extract-content" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 
14:11:17.796774 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="extract-content" Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796784 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796790 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796807 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="extract-content" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796812 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="extract-content" Oct 01 14:11:17 crc kubenswrapper[4605]: E1001 14:11:17.796825 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="registry-server" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.796830 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="registry-server" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.797007 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c87e4571-b973-4400-835f-1affd142c33d" containerName="registry-server" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.797025 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cfcc390-49fd-4592-b68f-b37575bc12d0" containerName="registry-server" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.797041 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.803463 4605 util.go:30] "No sandbox for pod can be found. 
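
The RemoveStaleState burst above replays every container of the recently deleted pods (extract-utilities, extract-content, registry-server, the bootstrap job) so the CPU and memory managers can drop per-container assignments whose pod is no longer active. A sketch of that bookkeeping, with hypothetical types standing in for kubelet's richer state:

```go
package main

import "fmt"

type key struct{ podUID, container string }

// dropStale deletes resource assignments whose pod is no longer in the
// active set -- the same cleanup the RemoveStaleState entries record.
func dropStale(assignments map[key]string, active map[string]bool) {
	for k := range assignments {
		if !active[k.podUID] {
			fmt.Printf("removing stale assignment %s/%s\n", k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	a := map[key]string{
		{"0cfcc390", "registry-server"}: "cpuset 0-1",
		{"c87e4571", "extract-content"}: "cpuset 2-3",
	}
	dropStale(a, map[string]bool{}) // no pods active anymore
	fmt.Println("remaining assignments:", len(a))
}
```
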
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.809786 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.810593 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.810989 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.819260 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.841253 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v"] Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.884555 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.884704 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbkp2\" (UniqueName: \"kubernetes.io/projected/88958218-6061-4e38-b6fd-88b9502ebf30-kube-api-access-hbkp2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.884753 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.986313 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.986421 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbkp2\" (UniqueName: \"kubernetes.io/projected/88958218-6061-4e38-b6fd-88b9502ebf30-kube-api-access-hbkp2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.986459 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-inventory\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.990672 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:17 crc kubenswrapper[4605]: I1001 14:11:17.993730 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:18 crc kubenswrapper[4605]: I1001 14:11:18.009866 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbkp2\" (UniqueName: \"kubernetes.io/projected/88958218-6061-4e38-b6fd-88b9502ebf30-kube-api-access-hbkp2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8c89v\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:18 crc kubenswrapper[4605]: I1001 14:11:18.136162 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:11:18 crc kubenswrapper[4605]: I1001 14:11:18.633931 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v"] Oct 01 14:11:18 crc kubenswrapper[4605]: I1001 14:11:18.645395 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:11:18 crc kubenswrapper[4605]: I1001 14:11:18.690694 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" event={"ID":"88958218-6061-4e38-b6fd-88b9502ebf30","Type":"ContainerStarted","Data":"c8690313b0c657776a7472238ee03a413e027f28bec34a5714ba910ed91f3521"} Oct 01 14:11:20 crc kubenswrapper[4605]: I1001 14:11:20.725439 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" event={"ID":"88958218-6061-4e38-b6fd-88b9502ebf30","Type":"ContainerStarted","Data":"c68d6d2bb2bb5c45866bfc453a5a6e7fddba44c4357f12f96f207d2cbdbe9e71"} Oct 01 14:11:21 crc kubenswrapper[4605]: I1001 14:11:21.631494 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:11:21 crc kubenswrapper[4605]: I1001 14:11:21.631993 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:11:21 crc kubenswrapper[4605]: I1001 14:11:21.632047 4605 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:11:21 crc kubenswrapper[4605]: I1001 14:11:21.633027 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:11:21 crc kubenswrapper[4605]: I1001 14:11:21.633118 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" gracePeriod=600 Oct 01 14:11:21 crc kubenswrapper[4605]: E1001 14:11:21.768999 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:11:22 crc kubenswrapper[4605]: I1001 14:11:22.745732 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" exitCode=0 Oct 01 14:11:22 crc kubenswrapper[4605]: I1001 14:11:22.745819 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196"} Oct 01 14:11:22 crc kubenswrapper[4605]: I1001 14:11:22.746071 4605 scope.go:117] "RemoveContainer" containerID="31f8ad2782362fc773c73e454aeec697a35f4e0956b1bf4d85878b45beec465b" Oct 01 14:11:22 crc kubenswrapper[4605]: I1001 14:11:22.746897 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:11:22 crc kubenswrapper[4605]: E1001 14:11:22.747614 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:11:22 crc kubenswrapper[4605]: I1001 14:11:22.777290 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" podStartSLOduration=4.45279081 podStartE2EDuration="5.777272245s" podCreationTimestamp="2025-10-01 14:11:17 +0000 UTC" firstStartedPulling="2025-10-01 14:11:18.645144432 +0000 UTC m=+1601.389120640" lastFinishedPulling="2025-10-01 14:11:19.969625827 +0000 UTC m=+1602.713602075" observedRunningTime="2025-10-01 14:11:20.75275344 +0000 UTC m=+1603.496729668" watchObservedRunningTime="2025-10-01 14:11:22.777272245 +0000 UTC m=+1605.521248453" 
Oct 01 14:11:23 crc kubenswrapper[4605]: I1001 14:11:23.026536 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-10d2-account-create-tlgkg"] Oct 01 14:11:23 crc kubenswrapper[4605]: I1001 14:11:23.038054 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-10d2-account-create-tlgkg"] Oct 01 14:11:23 crc kubenswrapper[4605]: I1001 14:11:23.937281 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="830104b9-e272-4098-ac03-f03681f72078" path="/var/lib/kubelet/pods/830104b9-e272-4098-ac03-f03681f72078/volumes" Oct 01 14:11:24 crc kubenswrapper[4605]: I1001 14:11:24.026748 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-3f5e-account-create-r2dbh"] Oct 01 14:11:24 crc kubenswrapper[4605]: I1001 14:11:24.036326 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-3f5e-account-create-r2dbh"] Oct 01 14:11:24 crc kubenswrapper[4605]: I1001 14:11:24.046430 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a1df-account-create-tt6sl"] Oct 01 14:11:24 crc kubenswrapper[4605]: I1001 14:11:24.053671 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-a1df-account-create-tt6sl"] Oct 01 14:11:25 crc kubenswrapper[4605]: I1001 14:11:25.944401 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a17c5ad-ae46-4ac5-8195-b33580e0e77d" path="/var/lib/kubelet/pods/1a17c5ad-ae46-4ac5-8195-b33580e0e77d/volumes" Oct 01 14:11:25 crc kubenswrapper[4605]: I1001 14:11:25.947844 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e3467e0-d2bc-48eb-a15a-16a3ebd1d813" path="/var/lib/kubelet/pods/9e3467e0-d2bc-48eb-a15a-16a3ebd1d813/volumes" Oct 01 14:11:36 crc kubenswrapper[4605]: I1001 14:11:36.926831 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:11:36 crc kubenswrapper[4605]: E1001 14:11:36.927518 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.047244 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rrkt4"] Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.057242 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-9fz97"] Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.070243 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-gcgcz"] Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.081926 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rrkt4"] Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.090231 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-9fz97"] Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.097091 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-gcgcz"] Oct 01 14:11:48 crc kubenswrapper[4605]: I1001 14:11:48.988922 4605 scope.go:117] "RemoveContainer" 
containerID="af7b763235d9d5ec306603244eefa291e4fc408065b9a8a45171dbddb62cef69" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.024228 4605 scope.go:117] "RemoveContainer" containerID="701bc72e5736e51beddc7d0057d6a8028da318154b89eb18c8c171f84e698d09" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.061742 4605 scope.go:117] "RemoveContainer" containerID="5c79797f02d4fb162eee6159e4e768992c6e85dd7b561ffe254139dc09a015f9" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.104414 4605 scope.go:117] "RemoveContainer" containerID="7f928d31bbf0d797d8853b8870397f4392e5f69e5fb826772107d0bf7c886718" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.141888 4605 scope.go:117] "RemoveContainer" containerID="e63a9c9fa0be55ac8f9bf99febd858d93a221af5fe4caed4032d9a8fb8c6984f" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.179306 4605 scope.go:117] "RemoveContainer" containerID="87fc9f8e8849a911550de077eee0ac95c773779ad40f1c5d99475dbd37847e2d" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.208115 4605 scope.go:117] "RemoveContainer" containerID="d97594c5ab5cd3ff7e18537c5071f49d81a9eb5cd7ca3feae668dbb5ca0ee440" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.244247 4605 scope.go:117] "RemoveContainer" containerID="4cc13cd50b58ff7ceb886e971b88a8db673aba466c79ec59bdeeeaa9e14afcf0" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.264437 4605 scope.go:117] "RemoveContainer" containerID="048bfa2a9b573d02c9a76af2b4713db677ff87697f741183e2a1c33999f6ad28" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.293590 4605 scope.go:117] "RemoveContainer" containerID="0bd576d0f0ca5a7e0ee3cd08c3d585797589b9efdd5683773f1ffd21c2de7067" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.320468 4605 scope.go:117] "RemoveContainer" containerID="ef32932c77e1999fec090f0e565c0665639fe0043a0a3edd6b5a48c22458bedc" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.926569 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:11:49 crc kubenswrapper[4605]: E1001 14:11:49.926856 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.940369 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a3c21a-ca5f-44a9-a6da-ea4ddb772f43" path="/var/lib/kubelet/pods/06a3c21a-ca5f-44a9-a6da-ea4ddb772f43/volumes" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.948347 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="158e3a16-bd4b-45e2-be45-e8f36efc579d" path="/var/lib/kubelet/pods/158e3a16-bd4b-45e2-be45-e8f36efc579d/volumes" Oct 01 14:11:49 crc kubenswrapper[4605]: I1001 14:11:49.953488 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92500ff4-ab25-4756-831e-b32aba1e71ff" path="/var/lib/kubelet/pods/92500ff4-ab25-4756-831e-b32aba1e71ff/volumes" Oct 01 14:11:59 crc kubenswrapper[4605]: I1001 14:11:59.046291 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-86rqc"] Oct 01 14:11:59 crc kubenswrapper[4605]: I1001 14:11:59.053858 4605 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/keystone-db-sync-86rqc"] Oct 01 14:11:59 crc kubenswrapper[4605]: I1001 14:11:59.946312 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a204e126-3626-4105-840d-85d43b095f8b" path="/var/lib/kubelet/pods/a204e126-3626-4105-840d-85d43b095f8b/volumes" Oct 01 14:12:01 crc kubenswrapper[4605]: I1001 14:12:01.032828 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-70b3-account-create-kk9zv"] Oct 01 14:12:01 crc kubenswrapper[4605]: I1001 14:12:01.041441 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-70b3-account-create-kk9zv"] Oct 01 14:12:01 crc kubenswrapper[4605]: I1001 14:12:01.938732 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fd1d985-bf1b-4b7e-9df9-f5a3ca363092" path="/var/lib/kubelet/pods/3fd1d985-bf1b-4b7e-9df9-f5a3ca363092/volumes" Oct 01 14:12:02 crc kubenswrapper[4605]: I1001 14:12:02.034738 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-32cc-account-create-cbl5v"] Oct 01 14:12:02 crc kubenswrapper[4605]: I1001 14:12:02.043816 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2e10-account-create-57khj"] Oct 01 14:12:02 crc kubenswrapper[4605]: I1001 14:12:02.052387 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-l69r5"] Oct 01 14:12:02 crc kubenswrapper[4605]: I1001 14:12:02.061483 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2e10-account-create-57khj"] Oct 01 14:12:02 crc kubenswrapper[4605]: I1001 14:12:02.070260 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-32cc-account-create-cbl5v"] Oct 01 14:12:02 crc kubenswrapper[4605]: I1001 14:12:02.078173 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-l69r5"] Oct 01 14:12:03 crc kubenswrapper[4605]: I1001 14:12:03.938655 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a17b7512-23cb-4c81-a755-788fecd5ece2" path="/var/lib/kubelet/pods/a17b7512-23cb-4c81-a755-788fecd5ece2/volumes" Oct 01 14:12:03 crc kubenswrapper[4605]: I1001 14:12:03.940412 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9489bed-ef81-40d8-8a3e-1cc162ced1b6" path="/var/lib/kubelet/pods/a9489bed-ef81-40d8-8a3e-1cc162ced1b6/volumes" Oct 01 14:12:03 crc kubenswrapper[4605]: I1001 14:12:03.941902 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9b91c60-f218-4df3-ad79-b40f7614f5dd" path="/var/lib/kubelet/pods/c9b91c60-f218-4df3-ad79-b40f7614f5dd/volumes" Oct 01 14:12:04 crc kubenswrapper[4605]: I1001 14:12:04.927290 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:12:04 crc kubenswrapper[4605]: E1001 14:12:04.927566 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.341033 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k4dk5"] Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.343947 4605 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.361412 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k4dk5"] Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.463907 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-utilities\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.464984 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8xxm\" (UniqueName: \"kubernetes.io/projected/82623e03-029a-4955-92c9-0a5cb52e074b-kube-api-access-l8xxm\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.465188 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-catalog-content\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.566378 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8xxm\" (UniqueName: \"kubernetes.io/projected/82623e03-029a-4955-92c9-0a5cb52e074b-kube-api-access-l8xxm\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.566452 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-catalog-content\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.566618 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-utilities\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.567148 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-utilities\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.567342 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-catalog-content\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.584114 
4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8xxm\" (UniqueName: \"kubernetes.io/projected/82623e03-029a-4955-92c9-0a5cb52e074b-kube-api-access-l8xxm\") pod \"certified-operators-k4dk5\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.671299 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:15 crc kubenswrapper[4605]: I1001 14:12:15.927786 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:12:15 crc kubenswrapper[4605]: E1001 14:12:15.928259 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.163556 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k4dk5"] Oct 01 14:12:16 crc kubenswrapper[4605]: W1001 14:12:16.172605 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82623e03_029a_4955_92c9_0a5cb52e074b.slice/crio-5edc8c979f2e815d6def425cac7523161bf5ac03daf3994bab2fa31067325c54 WatchSource:0}: Error finding container 5edc8c979f2e815d6def425cac7523161bf5ac03daf3994bab2fa31067325c54: Status 404 returned error can't find the container with id 5edc8c979f2e815d6def425cac7523161bf5ac03daf3994bab2fa31067325c54 Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.247219 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerStarted","Data":"5edc8c979f2e815d6def425cac7523161bf5ac03daf3994bab2fa31067325c54"} Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.745158 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7jk9j"] Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.747288 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.767913 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7jk9j"] Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.896467 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-utilities\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.896757 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n7lr\" (UniqueName: \"kubernetes.io/projected/caba4a21-c5a9-470a-a1ca-f8508b3f3059-kube-api-access-4n7lr\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:16 crc kubenswrapper[4605]: I1001 14:12:16.896926 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-catalog-content\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.000201 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-utilities\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.000269 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n7lr\" (UniqueName: \"kubernetes.io/projected/caba4a21-c5a9-470a-a1ca-f8508b3f3059-kube-api-access-4n7lr\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.000384 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-catalog-content\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.000694 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-utilities\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.000834 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-catalog-content\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.024153 4605 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4n7lr\" (UniqueName: \"kubernetes.io/projected/caba4a21-c5a9-470a-a1ca-f8508b3f3059-kube-api-access-4n7lr\") pod \"community-operators-7jk9j\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.065969 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.263224 4605 generic.go:334] "Generic (PLEG): container finished" podID="82623e03-029a-4955-92c9-0a5cb52e074b" containerID="2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b" exitCode=0 Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.263267 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerDied","Data":"2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b"} Oct 01 14:12:17 crc kubenswrapper[4605]: I1001 14:12:17.540492 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7jk9j"] Oct 01 14:12:18 crc kubenswrapper[4605]: I1001 14:12:18.277880 4605 generic.go:334] "Generic (PLEG): container finished" podID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerID="c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208" exitCode=0 Oct 01 14:12:18 crc kubenswrapper[4605]: I1001 14:12:18.278256 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerDied","Data":"c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208"} Oct 01 14:12:18 crc kubenswrapper[4605]: I1001 14:12:18.278483 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerStarted","Data":"a5bde063d1813fe9c97dbbaa7353001bd8b0838c6fb225a06d3b50a4f1843d1c"} Oct 01 14:12:19 crc kubenswrapper[4605]: I1001 14:12:19.291232 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerStarted","Data":"09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8"} Oct 01 14:12:20 crc kubenswrapper[4605]: I1001 14:12:20.307579 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerStarted","Data":"df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189"} Oct 01 14:12:20 crc kubenswrapper[4605]: I1001 14:12:20.310783 4605 generic.go:334] "Generic (PLEG): container finished" podID="82623e03-029a-4955-92c9-0a5cb52e074b" containerID="09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8" exitCode=0 Oct 01 14:12:20 crc kubenswrapper[4605]: I1001 14:12:20.310936 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerDied","Data":"09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8"} Oct 01 14:12:21 crc kubenswrapper[4605]: I1001 14:12:21.321261 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" 
event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerStarted","Data":"972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5"} Oct 01 14:12:23 crc kubenswrapper[4605]: I1001 14:12:23.349776 4605 generic.go:334] "Generic (PLEG): container finished" podID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerID="df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189" exitCode=0 Oct 01 14:12:23 crc kubenswrapper[4605]: I1001 14:12:23.349862 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerDied","Data":"df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189"} Oct 01 14:12:23 crc kubenswrapper[4605]: I1001 14:12:23.365240 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k4dk5" podStartSLOduration=4.775940988 podStartE2EDuration="8.365224553s" podCreationTimestamp="2025-10-01 14:12:15 +0000 UTC" firstStartedPulling="2025-10-01 14:12:17.27593989 +0000 UTC m=+1660.019916098" lastFinishedPulling="2025-10-01 14:12:20.865223455 +0000 UTC m=+1663.609199663" observedRunningTime="2025-10-01 14:12:22.357083401 +0000 UTC m=+1665.101059609" watchObservedRunningTime="2025-10-01 14:12:23.365224553 +0000 UTC m=+1666.109200761" Oct 01 14:12:24 crc kubenswrapper[4605]: I1001 14:12:24.361647 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerStarted","Data":"4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe"} Oct 01 14:12:24 crc kubenswrapper[4605]: I1001 14:12:24.385954 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7jk9j" podStartSLOduration=2.854739887 podStartE2EDuration="8.385935753s" podCreationTimestamp="2025-10-01 14:12:16 +0000 UTC" firstStartedPulling="2025-10-01 14:12:18.296637399 +0000 UTC m=+1661.040613607" lastFinishedPulling="2025-10-01 14:12:23.827833265 +0000 UTC m=+1666.571809473" observedRunningTime="2025-10-01 14:12:24.379284565 +0000 UTC m=+1667.123260803" watchObservedRunningTime="2025-10-01 14:12:24.385935753 +0000 UTC m=+1667.129911961" Oct 01 14:12:25 crc kubenswrapper[4605]: I1001 14:12:25.671983 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:25 crc kubenswrapper[4605]: I1001 14:12:25.672052 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:26 crc kubenswrapper[4605]: I1001 14:12:26.722746 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-k4dk5" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="registry-server" probeResult="failure" output=< Oct 01 14:12:26 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:12:26 crc kubenswrapper[4605]: > Oct 01 14:12:27 crc kubenswrapper[4605]: I1001 14:12:27.067248 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:27 crc kubenswrapper[4605]: I1001 14:12:27.067300 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:27 crc kubenswrapper[4605]: I1001 
14:12:27.111741 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:28 crc kubenswrapper[4605]: I1001 14:12:28.927176 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:12:28 crc kubenswrapper[4605]: E1001 14:12:28.927688 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:12:35 crc kubenswrapper[4605]: I1001 14:12:35.727168 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:35 crc kubenswrapper[4605]: I1001 14:12:35.791407 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:35 crc kubenswrapper[4605]: I1001 14:12:35.965845 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k4dk5"] Oct 01 14:12:37 crc kubenswrapper[4605]: I1001 14:12:37.115629 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:37 crc kubenswrapper[4605]: I1001 14:12:37.490868 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k4dk5" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="registry-server" containerID="cri-o://972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5" gracePeriod=2 Oct 01 14:12:37 crc kubenswrapper[4605]: I1001 14:12:37.945931 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.147313 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-catalog-content\") pod \"82623e03-029a-4955-92c9-0a5cb52e074b\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.147813 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-utilities\") pod \"82623e03-029a-4955-92c9-0a5cb52e074b\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.147901 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8xxm\" (UniqueName: \"kubernetes.io/projected/82623e03-029a-4955-92c9-0a5cb52e074b-kube-api-access-l8xxm\") pod \"82623e03-029a-4955-92c9-0a5cb52e074b\" (UID: \"82623e03-029a-4955-92c9-0a5cb52e074b\") " Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.148813 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-utilities" (OuterVolumeSpecName: "utilities") pod "82623e03-029a-4955-92c9-0a5cb52e074b" (UID: "82623e03-029a-4955-92c9-0a5cb52e074b"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.164351 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82623e03-029a-4955-92c9-0a5cb52e074b-kube-api-access-l8xxm" (OuterVolumeSpecName: "kube-api-access-l8xxm") pod "82623e03-029a-4955-92c9-0a5cb52e074b" (UID: "82623e03-029a-4955-92c9-0a5cb52e074b"). InnerVolumeSpecName "kube-api-access-l8xxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.201687 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82623e03-029a-4955-92c9-0a5cb52e074b" (UID: "82623e03-029a-4955-92c9-0a5cb52e074b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.250514 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8xxm\" (UniqueName: \"kubernetes.io/projected/82623e03-029a-4955-92c9-0a5cb52e074b-kube-api-access-l8xxm\") on node \"crc\" DevicePath \"\"" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.250561 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.250571 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82623e03-029a-4955-92c9-0a5cb52e074b-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.501362 4605 generic.go:334] "Generic (PLEG): container finished" podID="82623e03-029a-4955-92c9-0a5cb52e074b" containerID="972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5" exitCode=0 Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.501396 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerDied","Data":"972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5"} Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.501435 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k4dk5" event={"ID":"82623e03-029a-4955-92c9-0a5cb52e074b","Type":"ContainerDied","Data":"5edc8c979f2e815d6def425cac7523161bf5ac03daf3994bab2fa31067325c54"} Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.501451 4605 scope.go:117] "RemoveContainer" containerID="972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.501449 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k4dk5" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.522052 4605 scope.go:117] "RemoveContainer" containerID="09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.559240 4605 scope.go:117] "RemoveContainer" containerID="2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.571720 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k4dk5"] Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.580312 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k4dk5"] Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.587646 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7jk9j"] Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.587922 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7jk9j" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="registry-server" containerID="cri-o://4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe" gracePeriod=2 Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.612049 4605 scope.go:117] "RemoveContainer" containerID="972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5" Oct 01 14:12:38 crc kubenswrapper[4605]: E1001 14:12:38.612490 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5\": container with ID starting with 972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5 not found: ID does not exist" containerID="972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.612532 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5"} err="failed to get container status \"972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5\": rpc error: code = NotFound desc = could not find container \"972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5\": container with ID starting with 972024cc752617c1697745d95bd3f2645cc7b43c00b4447f11e6f726af6b0ce5 not found: ID does not exist" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.612558 4605 scope.go:117] "RemoveContainer" containerID="09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8" Oct 01 14:12:38 crc kubenswrapper[4605]: E1001 14:12:38.612884 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8\": container with ID starting with 09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8 not found: ID does not exist" containerID="09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.612929 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8"} err="failed to get container status \"09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8\": rpc 
error: code = NotFound desc = could not find container \"09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8\": container with ID starting with 09572f26ee87c6c733c129167ef1c34494763d9a10e15413d9e2eb3cc3b029e8 not found: ID does not exist" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.612974 4605 scope.go:117] "RemoveContainer" containerID="2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b" Oct 01 14:12:38 crc kubenswrapper[4605]: E1001 14:12:38.613880 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b\": container with ID starting with 2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b not found: ID does not exist" containerID="2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b" Oct 01 14:12:38 crc kubenswrapper[4605]: I1001 14:12:38.613912 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b"} err="failed to get container status \"2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b\": rpc error: code = NotFound desc = could not find container \"2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b\": container with ID starting with 2be78812bd2ac98b2522182d74955cbdc8a7d240d69a7869e9e7b3318b882e1b not found: ID does not exist" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.063650 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.068754 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-catalog-content\") pod \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.068808 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-utilities\") pod \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.068867 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n7lr\" (UniqueName: \"kubernetes.io/projected/caba4a21-c5a9-470a-a1ca-f8508b3f3059-kube-api-access-4n7lr\") pod \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\" (UID: \"caba4a21-c5a9-470a-a1ca-f8508b3f3059\") " Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.069764 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-utilities" (OuterVolumeSpecName: "utilities") pod "caba4a21-c5a9-470a-a1ca-f8508b3f3059" (UID: "caba4a21-c5a9-470a-a1ca-f8508b3f3059"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.076480 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caba4a21-c5a9-470a-a1ca-f8508b3f3059-kube-api-access-4n7lr" (OuterVolumeSpecName: "kube-api-access-4n7lr") pod "caba4a21-c5a9-470a-a1ca-f8508b3f3059" (UID: "caba4a21-c5a9-470a-a1ca-f8508b3f3059"). 
InnerVolumeSpecName "kube-api-access-4n7lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.134312 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "caba4a21-c5a9-470a-a1ca-f8508b3f3059" (UID: "caba4a21-c5a9-470a-a1ca-f8508b3f3059"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.170247 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n7lr\" (UniqueName: \"kubernetes.io/projected/caba4a21-c5a9-470a-a1ca-f8508b3f3059-kube-api-access-4n7lr\") on node \"crc\" DevicePath \"\"" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.170284 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.170298 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/caba4a21-c5a9-470a-a1ca-f8508b3f3059-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.511113 4605 generic.go:334] "Generic (PLEG): container finished" podID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerID="4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe" exitCode=0 Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.511174 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerDied","Data":"4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe"} Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.511220 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7jk9j" event={"ID":"caba4a21-c5a9-470a-a1ca-f8508b3f3059","Type":"ContainerDied","Data":"a5bde063d1813fe9c97dbbaa7353001bd8b0838c6fb225a06d3b50a4f1843d1c"} Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.511237 4605 scope.go:117] "RemoveContainer" containerID="4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.511424 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7jk9j" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.542218 4605 scope.go:117] "RemoveContainer" containerID="df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.542230 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7jk9j"] Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.551021 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7jk9j"] Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.566661 4605 scope.go:117] "RemoveContainer" containerID="c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.585443 4605 scope.go:117] "RemoveContainer" containerID="4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe" Oct 01 14:12:39 crc kubenswrapper[4605]: E1001 14:12:39.585870 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe\": container with ID starting with 4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe not found: ID does not exist" containerID="4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.585910 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe"} err="failed to get container status \"4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe\": rpc error: code = NotFound desc = could not find container \"4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe\": container with ID starting with 4b15ecaadccaa50bc3a7d0ca7df503e594e483e7cb7bb5cbfc5b9f4de92c66fe not found: ID does not exist" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.585932 4605 scope.go:117] "RemoveContainer" containerID="df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189" Oct 01 14:12:39 crc kubenswrapper[4605]: E1001 14:12:39.586348 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189\": container with ID starting with df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189 not found: ID does not exist" containerID="df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.586496 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189"} err="failed to get container status \"df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189\": rpc error: code = NotFound desc = could not find container \"df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189\": container with ID starting with df7caffe6b3167cb5ae9a220d40dc1e7d032ec09b996a80be2fcab38387b3189 not found: ID does not exist" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.586609 4605 scope.go:117] "RemoveContainer" containerID="c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208" Oct 01 14:12:39 crc kubenswrapper[4605]: E1001 14:12:39.586935 4605 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208\": container with ID starting with c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208 not found: ID does not exist" containerID="c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.587049 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208"} err="failed to get container status \"c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208\": rpc error: code = NotFound desc = could not find container \"c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208\": container with ID starting with c5d4dd54009c313552347f8c830a1dfd983b4c19fdb8099ca25c824f48061208 not found: ID does not exist" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.936932 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" path="/var/lib/kubelet/pods/82623e03-029a-4955-92c9-0a5cb52e074b/volumes" Oct 01 14:12:39 crc kubenswrapper[4605]: I1001 14:12:39.937604 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" path="/var/lib/kubelet/pods/caba4a21-c5a9-470a-a1ca-f8508b3f3059/volumes" Oct 01 14:12:41 crc kubenswrapper[4605]: I1001 14:12:41.926391 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:12:41 crc kubenswrapper[4605]: E1001 14:12:41.927013 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.053519 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-px7rk"] Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.062658 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kdhkw"] Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.069279 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kdhkw"] Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.077484 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-px7rk"] Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.548009 4605 scope.go:117] "RemoveContainer" containerID="8ed9e1de2256f8929e97adf8a7f228aadc7ec923bca05813cc8b7a4001d8d274" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.580584 4605 scope.go:117] "RemoveContainer" containerID="9f92cb5c0d44c8434266608af6c70687d04a2eeec764a779f86548a949fc9fbe" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.750257 4605 scope.go:117] "RemoveContainer" containerID="c375d927f3cadfd99e69c736202c7116257743c6a3b03d725027db35f7063dc9" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.783609 4605 scope.go:117] "RemoveContainer" containerID="691f8c0bb7a0d3ce9fe779a35a8a542df3c03aee0ed2bddffb343283c537560d" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.854718 4605 scope.go:117] 
"RemoveContainer" containerID="71654a37e926f58fb41b42b8f741d52e76797cfe45508d713c92ad418aa8d09d" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.906365 4605 scope.go:117] "RemoveContainer" containerID="b4e365d933fae88e574955c8f9db28bf719af1d77a020daeb9f4f55ae9f7eea1" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.967627 4605 scope.go:117] "RemoveContainer" containerID="d5bca519a32c6b26704959dbde2c064e93fc1a192869bc083ddbccb63bd00d41" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.969650 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb" path="/var/lib/kubelet/pods/9bfc1afb-d5bf-4261-9a9a-e00fced2e3bb/volumes" Oct 01 14:12:49 crc kubenswrapper[4605]: I1001 14:12:49.978866 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3bd053b-19f8-4908-b8d5-e5c0ae5599c0" path="/var/lib/kubelet/pods/a3bd053b-19f8-4908-b8d5-e5c0ae5599c0/volumes" Oct 01 14:12:50 crc kubenswrapper[4605]: I1001 14:12:50.011841 4605 scope.go:117] "RemoveContainer" containerID="f1f86ad225db6bc5faf7414010774230b9ccc60221c1d46a59daf167676f1ce6" Oct 01 14:12:54 crc kubenswrapper[4605]: I1001 14:12:54.926925 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:12:54 crc kubenswrapper[4605]: E1001 14:12:54.932243 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:13:06 crc kubenswrapper[4605]: I1001 14:13:06.926757 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:13:06 crc kubenswrapper[4605]: E1001 14:13:06.927620 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:13:10 crc kubenswrapper[4605]: I1001 14:13:10.043739 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-xj5tw"] Oct 01 14:13:10 crc kubenswrapper[4605]: I1001 14:13:10.052470 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-xj5tw"] Oct 01 14:13:11 crc kubenswrapper[4605]: I1001 14:13:11.802853 4605 generic.go:334] "Generic (PLEG): container finished" podID="88958218-6061-4e38-b6fd-88b9502ebf30" containerID="c68d6d2bb2bb5c45866bfc453a5a6e7fddba44c4357f12f96f207d2cbdbe9e71" exitCode=0 Oct 01 14:13:11 crc kubenswrapper[4605]: I1001 14:13:11.802934 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" event={"ID":"88958218-6061-4e38-b6fd-88b9502ebf30","Type":"ContainerDied","Data":"c68d6d2bb2bb5c45866bfc453a5a6e7fddba44c4357f12f96f207d2cbdbe9e71"} Oct 01 14:13:11 crc kubenswrapper[4605]: I1001 14:13:11.938417 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="e8f994e1-7878-41ab-b619-4946d957e710" path="/var/lib/kubelet/pods/e8f994e1-7878-41ab-b619-4946d957e710/volumes" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.220057 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.316614 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-inventory\") pod \"88958218-6061-4e38-b6fd-88b9502ebf30\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.316725 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbkp2\" (UniqueName: \"kubernetes.io/projected/88958218-6061-4e38-b6fd-88b9502ebf30-kube-api-access-hbkp2\") pod \"88958218-6061-4e38-b6fd-88b9502ebf30\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.316762 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-ssh-key\") pod \"88958218-6061-4e38-b6fd-88b9502ebf30\" (UID: \"88958218-6061-4e38-b6fd-88b9502ebf30\") " Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.326218 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88958218-6061-4e38-b6fd-88b9502ebf30-kube-api-access-hbkp2" (OuterVolumeSpecName: "kube-api-access-hbkp2") pod "88958218-6061-4e38-b6fd-88b9502ebf30" (UID: "88958218-6061-4e38-b6fd-88b9502ebf30"). InnerVolumeSpecName "kube-api-access-hbkp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.352051 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "88958218-6061-4e38-b6fd-88b9502ebf30" (UID: "88958218-6061-4e38-b6fd-88b9502ebf30"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.360917 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-inventory" (OuterVolumeSpecName: "inventory") pod "88958218-6061-4e38-b6fd-88b9502ebf30" (UID: "88958218-6061-4e38-b6fd-88b9502ebf30"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.420357 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.420396 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbkp2\" (UniqueName: \"kubernetes.io/projected/88958218-6061-4e38-b6fd-88b9502ebf30-kube-api-access-hbkp2\") on node \"crc\" DevicePath \"\"" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.420407 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88958218-6061-4e38-b6fd-88b9502ebf30-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.821804 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" event={"ID":"88958218-6061-4e38-b6fd-88b9502ebf30","Type":"ContainerDied","Data":"c8690313b0c657776a7472238ee03a413e027f28bec34a5714ba910ed91f3521"} Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.821848 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8690313b0c657776a7472238ee03a413e027f28bec34a5714ba910ed91f3521" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.821847 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8c89v" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.907464 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94"] Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908190 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="registry-server" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908219 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="registry-server" Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908243 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="registry-server" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908252 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="registry-server" Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908274 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="extract-utilities" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908283 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="extract-utilities" Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908314 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="extract-content" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908323 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="extract-content" Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908338 4605 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="extract-content" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908346 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="extract-content" Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908360 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="extract-utilities" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908368 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="extract-utilities" Oct 01 14:13:13 crc kubenswrapper[4605]: E1001 14:13:13.908387 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88958218-6061-4e38-b6fd-88b9502ebf30" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908397 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="88958218-6061-4e38-b6fd-88b9502ebf30" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908673 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="88958218-6061-4e38-b6fd-88b9502ebf30" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908709 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="82623e03-029a-4955-92c9-0a5cb52e074b" containerName="registry-server" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.908739 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="caba4a21-c5a9-470a-a1ca-f8508b3f3059" containerName="registry-server" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.909942 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.914273 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.914378 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.914396 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.914497 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:13:13 crc kubenswrapper[4605]: I1001 14:13:13.921133 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94"] Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.035224 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js8nv\" (UniqueName: \"kubernetes.io/projected/31cfc16a-0d93-4cc4-9281-e4cee9664772-kube-api-access-js8nv\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.035431 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.035551 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.137754 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js8nv\" (UniqueName: \"kubernetes.io/projected/31cfc16a-0d93-4cc4-9281-e4cee9664772-kube-api-access-js8nv\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.137838 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.137868 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.149625 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.155297 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.159947 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js8nv\" (UniqueName: \"kubernetes.io/projected/31cfc16a-0d93-4cc4-9281-e4cee9664772-kube-api-access-js8nv\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-9pc94\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.231749 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.758830 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94"] Oct 01 14:13:14 crc kubenswrapper[4605]: I1001 14:13:14.829472 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" event={"ID":"31cfc16a-0d93-4cc4-9281-e4cee9664772","Type":"ContainerStarted","Data":"0a576e533e09ba2766e5ce612dd94e15da09d7a66f0ee8d38fe1ed3a7c0ede82"} Oct 01 14:13:15 crc kubenswrapper[4605]: I1001 14:13:15.837797 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" event={"ID":"31cfc16a-0d93-4cc4-9281-e4cee9664772","Type":"ContainerStarted","Data":"ebe5b7885168da5b34676e67f865a31cf00f80cd72777e2b3b592b59d017b769"} Oct 01 14:13:15 crc kubenswrapper[4605]: I1001 14:13:15.854575 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" podStartSLOduration=2.176709458 podStartE2EDuration="2.854554358s" podCreationTimestamp="2025-10-01 14:13:13 +0000 UTC" firstStartedPulling="2025-10-01 14:13:14.782721123 +0000 UTC m=+1717.526697331" lastFinishedPulling="2025-10-01 14:13:15.460566003 +0000 UTC m=+1718.204542231" observedRunningTime="2025-10-01 14:13:15.849857709 +0000 UTC m=+1718.593833927" watchObservedRunningTime="2025-10-01 14:13:15.854554358 +0000 UTC m=+1718.598530566" Oct 01 14:13:17 crc kubenswrapper[4605]: I1001 14:13:17.024186 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-978c2"] Oct 01 14:13:17 crc kubenswrapper[4605]: I1001 14:13:17.031118 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/cinder-db-sync-978c2"] Oct 01 14:13:17 crc kubenswrapper[4605]: I1001 14:13:17.943790 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="287fa988-b116-4b4d-a02c-990e801124d0" path="/var/lib/kubelet/pods/287fa988-b116-4b4d-a02c-990e801124d0/volumes" Oct 01 14:13:18 crc kubenswrapper[4605]: I1001 14:13:18.027247 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-bkj69"] Oct 01 14:13:18 crc kubenswrapper[4605]: I1001 14:13:18.035053 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-bkj69"] Oct 01 14:13:19 crc kubenswrapper[4605]: I1001 14:13:19.926539 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:13:19 crc kubenswrapper[4605]: E1001 14:13:19.927068 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:13:19 crc kubenswrapper[4605]: I1001 14:13:19.936712 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f17ed625-db31-40c7-9958-f4b89f66ffa0" path="/var/lib/kubelet/pods/f17ed625-db31-40c7-9958-f4b89f66ffa0/volumes" Oct 01 14:13:33 crc kubenswrapper[4605]: I1001 14:13:33.926565 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:13:33 crc kubenswrapper[4605]: E1001 14:13:33.927453 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:13:45 crc kubenswrapper[4605]: I1001 14:13:45.927112 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:13:45 crc kubenswrapper[4605]: E1001 14:13:45.927892 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:13:50 crc kubenswrapper[4605]: I1001 14:13:50.518674 4605 scope.go:117] "RemoveContainer" containerID="d5c5696a982b9c00fbd496fa6dbc44a3ec17cc7729664ae0dcb2fd3916a2f948" Oct 01 14:13:50 crc kubenswrapper[4605]: I1001 14:13:50.571212 4605 scope.go:117] "RemoveContainer" containerID="5b4342682e1e4940b2836701ee9c809427752dc8e6f119c53e53b4610e2cf60a" Oct 01 14:13:50 crc kubenswrapper[4605]: I1001 14:13:50.609913 4605 scope.go:117] "RemoveContainer" containerID="5dd8a3eef8be60b024489f72e74f8f402a7c7268e1358dd126a7e73114f67f4a" Oct 01 14:13:50 crc kubenswrapper[4605]: I1001 14:13:50.665348 4605 scope.go:117] "RemoveContainer" 
containerID="36c926e892b2583480abb025793be6e8aee7874e4e154d9acd73ef5f94841bf7" Oct 01 14:13:50 crc kubenswrapper[4605]: I1001 14:13:50.726410 4605 scope.go:117] "RemoveContainer" containerID="f181348df26d79ee4c6e63fb8ad765de7771b451455895a1ee5602ae1069ed2e" Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.046832 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-hrfc9"] Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.054496 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-nwhc8"] Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.064409 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-w4cl4"] Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.072579 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-hrfc9"] Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.080050 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-nwhc8"] Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.087212 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-w4cl4"] Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.982858 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1186ce81-fb42-4a2e-ad07-6a63d580a2b1" path="/var/lib/kubelet/pods/1186ce81-fb42-4a2e-ad07-6a63d580a2b1/volumes" Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.984367 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99b2e3e9-ede9-4ece-9e25-8d4900e33264" path="/var/lib/kubelet/pods/99b2e3e9-ede9-4ece-9e25-8d4900e33264/volumes" Oct 01 14:13:57 crc kubenswrapper[4605]: I1001 14:13:57.985261 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5e8f6b7-5048-4809-96df-46f7bf6dba10" path="/var/lib/kubelet/pods/c5e8f6b7-5048-4809-96df-46f7bf6dba10/volumes" Oct 01 14:13:59 crc kubenswrapper[4605]: I1001 14:13:59.926802 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:13:59 crc kubenswrapper[4605]: E1001 14:13:59.927376 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:14:08 crc kubenswrapper[4605]: I1001 14:14:08.032270 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b33b-account-create-k9fvm"] Oct 01 14:14:08 crc kubenswrapper[4605]: I1001 14:14:08.039728 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-f398-account-create-69t8k"] Oct 01 14:14:08 crc kubenswrapper[4605]: I1001 14:14:08.046862 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-e3c6-account-create-gh4rs"] Oct 01 14:14:08 crc kubenswrapper[4605]: I1001 14:14:08.056774 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-b33b-account-create-k9fvm"] Oct 01 14:14:08 crc kubenswrapper[4605]: I1001 14:14:08.070188 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-e3c6-account-create-gh4rs"] Oct 01 14:14:08 crc kubenswrapper[4605]: I1001 
14:14:08.079103 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-f398-account-create-69t8k"] Oct 01 14:14:09 crc kubenswrapper[4605]: I1001 14:14:09.941039 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f2464d3-9ea7-482c-aa2b-f66d42141756" path="/var/lib/kubelet/pods/4f2464d3-9ea7-482c-aa2b-f66d42141756/volumes" Oct 01 14:14:09 crc kubenswrapper[4605]: I1001 14:14:09.941702 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ce5e6eb-9c71-46d4-938d-707d1586936d" path="/var/lib/kubelet/pods/5ce5e6eb-9c71-46d4-938d-707d1586936d/volumes" Oct 01 14:14:09 crc kubenswrapper[4605]: I1001 14:14:09.942379 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc6e55f0-8554-4e08-a425-104179aecfd3" path="/var/lib/kubelet/pods/dc6e55f0-8554-4e08-a425-104179aecfd3/volumes" Oct 01 14:14:11 crc kubenswrapper[4605]: I1001 14:14:11.927003 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:14:11 crc kubenswrapper[4605]: E1001 14:14:11.927830 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:14:26 crc kubenswrapper[4605]: I1001 14:14:26.927043 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:14:26 crc kubenswrapper[4605]: E1001 14:14:26.927756 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:14:30 crc kubenswrapper[4605]: E1001 14:14:30.816819 4605 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31cfc16a_0d93_4cc4_9281_e4cee9664772.slice/crio-conmon-ebe5b7885168da5b34676e67f865a31cf00f80cd72777e2b3b592b59d017b769.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31cfc16a_0d93_4cc4_9281_e4cee9664772.slice/crio-ebe5b7885168da5b34676e67f865a31cf00f80cd72777e2b3b592b59d017b769.scope\": RecentStats: unable to find data in memory cache]" Oct 01 14:14:31 crc kubenswrapper[4605]: I1001 14:14:31.505560 4605 generic.go:334] "Generic (PLEG): container finished" podID="31cfc16a-0d93-4cc4-9281-e4cee9664772" containerID="ebe5b7885168da5b34676e67f865a31cf00f80cd72777e2b3b592b59d017b769" exitCode=0 Oct 01 14:14:31 crc kubenswrapper[4605]: I1001 14:14:31.505674 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" event={"ID":"31cfc16a-0d93-4cc4-9281-e4cee9664772","Type":"ContainerDied","Data":"ebe5b7885168da5b34676e67f865a31cf00f80cd72777e2b3b592b59d017b769"} Oct 01 14:14:32 crc kubenswrapper[4605]: I1001 14:14:32.918511 4605 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.090162 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-ssh-key\") pod \"31cfc16a-0d93-4cc4-9281-e4cee9664772\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.090211 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js8nv\" (UniqueName: \"kubernetes.io/projected/31cfc16a-0d93-4cc4-9281-e4cee9664772-kube-api-access-js8nv\") pod \"31cfc16a-0d93-4cc4-9281-e4cee9664772\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.090276 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-inventory\") pod \"31cfc16a-0d93-4cc4-9281-e4cee9664772\" (UID: \"31cfc16a-0d93-4cc4-9281-e4cee9664772\") " Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.099946 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cfc16a-0d93-4cc4-9281-e4cee9664772-kube-api-access-js8nv" (OuterVolumeSpecName: "kube-api-access-js8nv") pod "31cfc16a-0d93-4cc4-9281-e4cee9664772" (UID: "31cfc16a-0d93-4cc4-9281-e4cee9664772"). InnerVolumeSpecName "kube-api-access-js8nv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.115774 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-inventory" (OuterVolumeSpecName: "inventory") pod "31cfc16a-0d93-4cc4-9281-e4cee9664772" (UID: "31cfc16a-0d93-4cc4-9281-e4cee9664772"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.120557 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "31cfc16a-0d93-4cc4-9281-e4cee9664772" (UID: "31cfc16a-0d93-4cc4-9281-e4cee9664772"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.193828 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.193861 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31cfc16a-0d93-4cc4-9281-e4cee9664772-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.193871 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js8nv\" (UniqueName: \"kubernetes.io/projected/31cfc16a-0d93-4cc4-9281-e4cee9664772-kube-api-access-js8nv\") on node \"crc\" DevicePath \"\"" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.523368 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" event={"ID":"31cfc16a-0d93-4cc4-9281-e4cee9664772","Type":"ContainerDied","Data":"0a576e533e09ba2766e5ce612dd94e15da09d7a66f0ee8d38fe1ed3a7c0ede82"} Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.523425 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a576e533e09ba2766e5ce612dd94e15da09d7a66f0ee8d38fe1ed3a7c0ede82" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.523432 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-9pc94" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.607673 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8"] Oct 01 14:14:33 crc kubenswrapper[4605]: E1001 14:14:33.608396 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cfc16a-0d93-4cc4-9281-e4cee9664772" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.608413 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cfc16a-0d93-4cc4-9281-e4cee9664772" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.608662 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="31cfc16a-0d93-4cc4-9281-e4cee9664772" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.609300 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.613593 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.613784 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.613796 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.613984 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.631953 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8"] Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.803827 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.803959 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.803996 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk884\" (UniqueName: \"kubernetes.io/projected/94e0445e-6b97-4b10-80a3-5d8827ce0120-kube-api-access-bk884\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.906941 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.907112 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.907161 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk884\" (UniqueName: \"kubernetes.io/projected/94e0445e-6b97-4b10-80a3-5d8827ce0120-kube-api-access-bk884\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.913599 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.933846 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:33 crc kubenswrapper[4605]: I1001 14:14:33.944209 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk884\" (UniqueName: \"kubernetes.io/projected/94e0445e-6b97-4b10-80a3-5d8827ce0120-kube-api-access-bk884\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:34 crc kubenswrapper[4605]: I1001 14:14:34.042665 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mfgwh"] Oct 01 14:14:34 crc kubenswrapper[4605]: I1001 14:14:34.056063 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mfgwh"] Oct 01 14:14:34 crc kubenswrapper[4605]: I1001 14:14:34.233523 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:34 crc kubenswrapper[4605]: I1001 14:14:34.746661 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8"] Oct 01 14:14:34 crc kubenswrapper[4605]: W1001 14:14:34.751382 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94e0445e_6b97_4b10_80a3_5d8827ce0120.slice/crio-ce54cdc4546c10a239e1b0999c03ed90a2a7d93577ea51ccfc3a311b85a5347c WatchSource:0}: Error finding container ce54cdc4546c10a239e1b0999c03ed90a2a7d93577ea51ccfc3a311b85a5347c: Status 404 returned error can't find the container with id ce54cdc4546c10a239e1b0999c03ed90a2a7d93577ea51ccfc3a311b85a5347c Oct 01 14:14:35 crc kubenswrapper[4605]: I1001 14:14:35.540875 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" event={"ID":"94e0445e-6b97-4b10-80a3-5d8827ce0120","Type":"ContainerStarted","Data":"ce54cdc4546c10a239e1b0999c03ed90a2a7d93577ea51ccfc3a311b85a5347c"} Oct 01 14:14:35 crc kubenswrapper[4605]: I1001 14:14:35.939911 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a5c924f-a949-47cd-802a-4246716c3504" path="/var/lib/kubelet/pods/9a5c924f-a949-47cd-802a-4246716c3504/volumes" Oct 01 14:14:36 crc kubenswrapper[4605]: I1001 14:14:36.552821 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" event={"ID":"94e0445e-6b97-4b10-80a3-5d8827ce0120","Type":"ContainerStarted","Data":"b2befe1bffd04780ef6950d5b2193cd5e778e823b90220ed14dcf77d7139d5b3"} Oct 01 14:14:36 crc kubenswrapper[4605]: I1001 14:14:36.574625 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" podStartSLOduration=2.922620392 podStartE2EDuration="3.574608607s" podCreationTimestamp="2025-10-01 14:14:33 +0000 UTC" firstStartedPulling="2025-10-01 14:14:34.754207082 +0000 UTC m=+1797.498183300" lastFinishedPulling="2025-10-01 14:14:35.406195307 +0000 UTC m=+1798.150171515" observedRunningTime="2025-10-01 14:14:36.573726094 +0000 UTC m=+1799.317702302" watchObservedRunningTime="2025-10-01 14:14:36.574608607 +0000 UTC m=+1799.318584815" Oct 01 14:14:40 crc kubenswrapper[4605]: I1001 14:14:40.592161 4605 generic.go:334] "Generic (PLEG): container finished" podID="94e0445e-6b97-4b10-80a3-5d8827ce0120" containerID="b2befe1bffd04780ef6950d5b2193cd5e778e823b90220ed14dcf77d7139d5b3" exitCode=0 Oct 01 14:14:40 crc kubenswrapper[4605]: I1001 14:14:40.592657 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" event={"ID":"94e0445e-6b97-4b10-80a3-5d8827ce0120","Type":"ContainerDied","Data":"b2befe1bffd04780ef6950d5b2193cd5e778e823b90220ed14dcf77d7139d5b3"} Oct 01 14:14:41 crc kubenswrapper[4605]: I1001 14:14:41.927047 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:14:41 crc kubenswrapper[4605]: E1001 14:14:41.927906 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.038756 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.193670 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bk884\" (UniqueName: \"kubernetes.io/projected/94e0445e-6b97-4b10-80a3-5d8827ce0120-kube-api-access-bk884\") pod \"94e0445e-6b97-4b10-80a3-5d8827ce0120\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.193787 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-inventory\") pod \"94e0445e-6b97-4b10-80a3-5d8827ce0120\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.193841 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-ssh-key\") pod \"94e0445e-6b97-4b10-80a3-5d8827ce0120\" (UID: \"94e0445e-6b97-4b10-80a3-5d8827ce0120\") " Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.209009 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94e0445e-6b97-4b10-80a3-5d8827ce0120-kube-api-access-bk884" (OuterVolumeSpecName: "kube-api-access-bk884") pod "94e0445e-6b97-4b10-80a3-5d8827ce0120" (UID: "94e0445e-6b97-4b10-80a3-5d8827ce0120"). InnerVolumeSpecName "kube-api-access-bk884". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.220251 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "94e0445e-6b97-4b10-80a3-5d8827ce0120" (UID: "94e0445e-6b97-4b10-80a3-5d8827ce0120"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.236610 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-inventory" (OuterVolumeSpecName: "inventory") pod "94e0445e-6b97-4b10-80a3-5d8827ce0120" (UID: "94e0445e-6b97-4b10-80a3-5d8827ce0120"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.296928 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bk884\" (UniqueName: \"kubernetes.io/projected/94e0445e-6b97-4b10-80a3-5d8827ce0120-kube-api-access-bk884\") on node \"crc\" DevicePath \"\"" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.297593 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.297693 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e0445e-6b97-4b10-80a3-5d8827ce0120-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.611693 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" event={"ID":"94e0445e-6b97-4b10-80a3-5d8827ce0120","Type":"ContainerDied","Data":"ce54cdc4546c10a239e1b0999c03ed90a2a7d93577ea51ccfc3a311b85a5347c"} Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.611923 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce54cdc4546c10a239e1b0999c03ed90a2a7d93577ea51ccfc3a311b85a5347c" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.611772 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.703510 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p"] Oct 01 14:14:42 crc kubenswrapper[4605]: E1001 14:14:42.705848 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e0445e-6b97-4b10-80a3-5d8827ce0120" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.706390 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e0445e-6b97-4b10-80a3-5d8827ce0120" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.706653 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="94e0445e-6b97-4b10-80a3-5d8827ce0120" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.707464 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.716084 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p"] Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.718457 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.718625 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.718735 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.718853 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.908300 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9bcp\" (UniqueName: \"kubernetes.io/projected/0a383a44-66c3-466b-977f-4297fa2f9718-kube-api-access-f9bcp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.908349 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:42 crc kubenswrapper[4605]: I1001 14:14:42.908469 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.010941 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9bcp\" (UniqueName: \"kubernetes.io/projected/0a383a44-66c3-466b-977f-4297fa2f9718-kube-api-access-f9bcp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.011016 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.011211 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: 
\"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.016614 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.019785 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.040177 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9bcp\" (UniqueName: \"kubernetes.io/projected/0a383a44-66c3-466b-977f-4297fa2f9718-kube-api-access-f9bcp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-95t2p\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.340913 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:14:43 crc kubenswrapper[4605]: I1001 14:14:43.889492 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p"] Oct 01 14:14:44 crc kubenswrapper[4605]: I1001 14:14:44.626994 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" event={"ID":"0a383a44-66c3-466b-977f-4297fa2f9718","Type":"ContainerStarted","Data":"37b9bced12c61ef07c179f78fb214d5df9aa18f75f412efdfe697ae5c1c7266b"} Oct 01 14:14:44 crc kubenswrapper[4605]: I1001 14:14:44.628393 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" event={"ID":"0a383a44-66c3-466b-977f-4297fa2f9718","Type":"ContainerStarted","Data":"bc87df9d552c538720d9c73e003b5afa063f826021b23a1a12b7df40af6572fe"} Oct 01 14:14:44 crc kubenswrapper[4605]: I1001 14:14:44.640409 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" podStartSLOduration=2.237442716 podStartE2EDuration="2.640392856s" podCreationTimestamp="2025-10-01 14:14:42 +0000 UTC" firstStartedPulling="2025-10-01 14:14:43.885525417 +0000 UTC m=+1806.629501615" lastFinishedPulling="2025-10-01 14:14:44.288475547 +0000 UTC m=+1807.032451755" observedRunningTime="2025-10-01 14:14:44.639613796 +0000 UTC m=+1807.383590004" watchObservedRunningTime="2025-10-01 14:14:44.640392856 +0000 UTC m=+1807.384369064" Oct 01 14:14:50 crc kubenswrapper[4605]: I1001 14:14:50.842084 4605 scope.go:117] "RemoveContainer" containerID="e334721b61dc5d90affcd8f0a3e2285d2a8cb08cd629cd21a2a518ddf97af508" Oct 01 14:14:50 crc kubenswrapper[4605]: I1001 14:14:50.872782 4605 scope.go:117] "RemoveContainer" containerID="250d6b8a09b193137b67bd55f34b17a3f4bf3c4d1882f5cb82360a8a042a2879" Oct 01 14:14:50 crc kubenswrapper[4605]: I1001 14:14:50.924558 4605 scope.go:117] 
"RemoveContainer" containerID="123cca5e71db19976a221cc6cca0ba3d968e708fea37788101a6847e016d065c" Oct 01 14:14:50 crc kubenswrapper[4605]: I1001 14:14:50.956508 4605 scope.go:117] "RemoveContainer" containerID="188a0f09b5cb4e548b3d2465a56eb8af13bfc7e1f35bc11b09668440c410db3e" Oct 01 14:14:51 crc kubenswrapper[4605]: I1001 14:14:51.023509 4605 scope.go:117] "RemoveContainer" containerID="961793ddfe31ddb97cbf276bdd0b26daaef6c3a80911c80711a2357fc2566656" Oct 01 14:14:51 crc kubenswrapper[4605]: I1001 14:14:51.043356 4605 scope.go:117] "RemoveContainer" containerID="d5fc73d95a746f544d49b3bce8808c5f11c17c8f4f4366de68f1b4db0fcb8524" Oct 01 14:14:51 crc kubenswrapper[4605]: I1001 14:14:51.086885 4605 scope.go:117] "RemoveContainer" containerID="7241abc649ee174058942ad5bee531d8101adcbac6b73a26098d508c089a73fd" Oct 01 14:14:52 crc kubenswrapper[4605]: I1001 14:14:52.927712 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:14:52 crc kubenswrapper[4605]: E1001 14:14:52.928194 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:14:59 crc kubenswrapper[4605]: I1001 14:14:59.046661 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-fh5rm"] Oct 01 14:14:59 crc kubenswrapper[4605]: I1001 14:14:59.057543 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-fh5rm"] Oct 01 14:14:59 crc kubenswrapper[4605]: I1001 14:14:59.938179 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44883e34-65b9-4eef-a5ae-75d88804c94f" path="/var/lib/kubelet/pods/44883e34-65b9-4eef-a5ae-75d88804c94f/volumes" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.034262 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-78hpx"] Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.043407 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-78hpx"] Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.167579 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv"] Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.169158 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.174573 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.174742 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.206807 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv"] Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.236462 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a60b01b6-8502-42df-aaa0-f09ef79a7223-secret-volume\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.236848 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwnbn\" (UniqueName: \"kubernetes.io/projected/a60b01b6-8502-42df-aaa0-f09ef79a7223-kube-api-access-pwnbn\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.236951 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a60b01b6-8502-42df-aaa0-f09ef79a7223-config-volume\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.339028 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a60b01b6-8502-42df-aaa0-f09ef79a7223-secret-volume\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.339179 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwnbn\" (UniqueName: \"kubernetes.io/projected/a60b01b6-8502-42df-aaa0-f09ef79a7223-kube-api-access-pwnbn\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.339239 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a60b01b6-8502-42df-aaa0-f09ef79a7223-config-volume\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.340540 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a60b01b6-8502-42df-aaa0-f09ef79a7223-config-volume\") pod 
\"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.356733 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a60b01b6-8502-42df-aaa0-f09ef79a7223-secret-volume\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.366895 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwnbn\" (UniqueName: \"kubernetes.io/projected/a60b01b6-8502-42df-aaa0-f09ef79a7223-kube-api-access-pwnbn\") pod \"collect-profiles-29322135-7crjv\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:00 crc kubenswrapper[4605]: I1001 14:15:00.532445 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:01 crc kubenswrapper[4605]: I1001 14:15:01.016168 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv"] Oct 01 14:15:01 crc kubenswrapper[4605]: W1001 14:15:01.024619 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda60b01b6_8502_42df_aaa0_f09ef79a7223.slice/crio-80c1603662ed5fcbbb9b144b3ce41fbee0762a9e429d8d7a1903f26fe8639f70 WatchSource:0}: Error finding container 80c1603662ed5fcbbb9b144b3ce41fbee0762a9e429d8d7a1903f26fe8639f70: Status 404 returned error can't find the container with id 80c1603662ed5fcbbb9b144b3ce41fbee0762a9e429d8d7a1903f26fe8639f70 Oct 01 14:15:01 crc kubenswrapper[4605]: I1001 14:15:01.783526 4605 generic.go:334] "Generic (PLEG): container finished" podID="a60b01b6-8502-42df-aaa0-f09ef79a7223" containerID="76c4360bdca1e3e106f93c1be3b42345582fbc8d6be2d65b3ae78788c16937ff" exitCode=0 Oct 01 14:15:01 crc kubenswrapper[4605]: I1001 14:15:01.784123 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" event={"ID":"a60b01b6-8502-42df-aaa0-f09ef79a7223","Type":"ContainerDied","Data":"76c4360bdca1e3e106f93c1be3b42345582fbc8d6be2d65b3ae78788c16937ff"} Oct 01 14:15:01 crc kubenswrapper[4605]: I1001 14:15:01.784171 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" event={"ID":"a60b01b6-8502-42df-aaa0-f09ef79a7223","Type":"ContainerStarted","Data":"80c1603662ed5fcbbb9b144b3ce41fbee0762a9e429d8d7a1903f26fe8639f70"} Oct 01 14:15:01 crc kubenswrapper[4605]: I1001 14:15:01.937911 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2" path="/var/lib/kubelet/pods/6d1195cd-cd23-4f48-a14a-f2ee21a0c0e2/volumes" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.105152 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.292616 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a60b01b6-8502-42df-aaa0-f09ef79a7223-secret-volume\") pod \"a60b01b6-8502-42df-aaa0-f09ef79a7223\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.292682 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a60b01b6-8502-42df-aaa0-f09ef79a7223-config-volume\") pod \"a60b01b6-8502-42df-aaa0-f09ef79a7223\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.292882 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwnbn\" (UniqueName: \"kubernetes.io/projected/a60b01b6-8502-42df-aaa0-f09ef79a7223-kube-api-access-pwnbn\") pod \"a60b01b6-8502-42df-aaa0-f09ef79a7223\" (UID: \"a60b01b6-8502-42df-aaa0-f09ef79a7223\") " Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.293677 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a60b01b6-8502-42df-aaa0-f09ef79a7223-config-volume" (OuterVolumeSpecName: "config-volume") pod "a60b01b6-8502-42df-aaa0-f09ef79a7223" (UID: "a60b01b6-8502-42df-aaa0-f09ef79a7223"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.301372 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a60b01b6-8502-42df-aaa0-f09ef79a7223-kube-api-access-pwnbn" (OuterVolumeSpecName: "kube-api-access-pwnbn") pod "a60b01b6-8502-42df-aaa0-f09ef79a7223" (UID: "a60b01b6-8502-42df-aaa0-f09ef79a7223"). InnerVolumeSpecName "kube-api-access-pwnbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.301375 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a60b01b6-8502-42df-aaa0-f09ef79a7223-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a60b01b6-8502-42df-aaa0-f09ef79a7223" (UID: "a60b01b6-8502-42df-aaa0-f09ef79a7223"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.394634 4605 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a60b01b6-8502-42df-aaa0-f09ef79a7223-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.394679 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwnbn\" (UniqueName: \"kubernetes.io/projected/a60b01b6-8502-42df-aaa0-f09ef79a7223-kube-api-access-pwnbn\") on node \"crc\" DevicePath \"\"" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.394691 4605 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a60b01b6-8502-42df-aaa0-f09ef79a7223-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.799976 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" event={"ID":"a60b01b6-8502-42df-aaa0-f09ef79a7223","Type":"ContainerDied","Data":"80c1603662ed5fcbbb9b144b3ce41fbee0762a9e429d8d7a1903f26fe8639f70"} Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.800049 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80c1603662ed5fcbbb9b144b3ce41fbee0762a9e429d8d7a1903f26fe8639f70" Oct 01 14:15:03 crc kubenswrapper[4605]: I1001 14:15:03.800114 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322135-7crjv" Oct 01 14:15:07 crc kubenswrapper[4605]: I1001 14:15:07.933813 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:15:07 crc kubenswrapper[4605]: E1001 14:15:07.934673 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:15:18 crc kubenswrapper[4605]: I1001 14:15:18.928231 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:15:18 crc kubenswrapper[4605]: E1001 14:15:18.929222 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:15:26 crc kubenswrapper[4605]: I1001 14:15:26.004860 4605 generic.go:334] "Generic (PLEG): container finished" podID="0a383a44-66c3-466b-977f-4297fa2f9718" containerID="37b9bced12c61ef07c179f78fb214d5df9aa18f75f412efdfe697ae5c1c7266b" exitCode=0 Oct 01 14:15:26 crc kubenswrapper[4605]: I1001 14:15:26.005458 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" 
event={"ID":"0a383a44-66c3-466b-977f-4297fa2f9718","Type":"ContainerDied","Data":"37b9bced12c61ef07c179f78fb214d5df9aa18f75f412efdfe697ae5c1c7266b"} Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.421389 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.559764 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-ssh-key\") pod \"0a383a44-66c3-466b-977f-4297fa2f9718\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.559933 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-inventory\") pod \"0a383a44-66c3-466b-977f-4297fa2f9718\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.560025 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9bcp\" (UniqueName: \"kubernetes.io/projected/0a383a44-66c3-466b-977f-4297fa2f9718-kube-api-access-f9bcp\") pod \"0a383a44-66c3-466b-977f-4297fa2f9718\" (UID: \"0a383a44-66c3-466b-977f-4297fa2f9718\") " Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.581844 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a383a44-66c3-466b-977f-4297fa2f9718-kube-api-access-f9bcp" (OuterVolumeSpecName: "kube-api-access-f9bcp") pod "0a383a44-66c3-466b-977f-4297fa2f9718" (UID: "0a383a44-66c3-466b-977f-4297fa2f9718"). InnerVolumeSpecName "kube-api-access-f9bcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.602137 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0a383a44-66c3-466b-977f-4297fa2f9718" (UID: "0a383a44-66c3-466b-977f-4297fa2f9718"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.612962 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-inventory" (OuterVolumeSpecName: "inventory") pod "0a383a44-66c3-466b-977f-4297fa2f9718" (UID: "0a383a44-66c3-466b-977f-4297fa2f9718"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.662502 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.662543 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a383a44-66c3-466b-977f-4297fa2f9718-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:15:27 crc kubenswrapper[4605]: I1001 14:15:27.662557 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9bcp\" (UniqueName: \"kubernetes.io/projected/0a383a44-66c3-466b-977f-4297fa2f9718-kube-api-access-f9bcp\") on node \"crc\" DevicePath \"\"" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.022509 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" event={"ID":"0a383a44-66c3-466b-977f-4297fa2f9718","Type":"ContainerDied","Data":"bc87df9d552c538720d9c73e003b5afa063f826021b23a1a12b7df40af6572fe"} Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.022551 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc87df9d552c538720d9c73e003b5afa063f826021b23a1a12b7df40af6572fe" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.022627 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-95t2p" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.134968 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt"] Oct 01 14:15:28 crc kubenswrapper[4605]: E1001 14:15:28.135668 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a383a44-66c3-466b-977f-4297fa2f9718" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.135699 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a383a44-66c3-466b-977f-4297fa2f9718" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:15:28 crc kubenswrapper[4605]: E1001 14:15:28.135744 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a60b01b6-8502-42df-aaa0-f09ef79a7223" containerName="collect-profiles" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.135757 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a60b01b6-8502-42df-aaa0-f09ef79a7223" containerName="collect-profiles" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.136068 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a383a44-66c3-466b-977f-4297fa2f9718" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.136135 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a60b01b6-8502-42df-aaa0-f09ef79a7223" containerName="collect-profiles" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.137330 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.139758 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.140269 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.148691 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.149148 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.160302 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt"] Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.273174 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.273235 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vsgd\" (UniqueName: \"kubernetes.io/projected/84e4e275-5c28-4ea8-bf23-154b3aaa036d-kube-api-access-7vsgd\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.273320 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.374954 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.375008 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vsgd\" (UniqueName: \"kubernetes.io/projected/84e4e275-5c28-4ea8-bf23-154b3aaa036d-kube-api-access-7vsgd\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.375050 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" 
(UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.380175 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.380640 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.402851 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vsgd\" (UniqueName: \"kubernetes.io/projected/84e4e275-5c28-4ea8-bf23-154b3aaa036d-kube-api-access-7vsgd\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:28 crc kubenswrapper[4605]: I1001 14:15:28.493628 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:15:29 crc kubenswrapper[4605]: I1001 14:15:29.060108 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt"] Oct 01 14:15:30 crc kubenswrapper[4605]: I1001 14:15:30.052379 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" event={"ID":"84e4e275-5c28-4ea8-bf23-154b3aaa036d","Type":"ContainerStarted","Data":"03f8252cc662127a2494f3b3f4796a69d8dc0aaf26fe01ce131b06d651665e47"} Oct 01 14:15:30 crc kubenswrapper[4605]: I1001 14:15:30.052722 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" event={"ID":"84e4e275-5c28-4ea8-bf23-154b3aaa036d","Type":"ContainerStarted","Data":"0ffadc61e0975ca8dd10021fccd49b0185675dcbd7737724b9351c8167632b60"} Oct 01 14:15:30 crc kubenswrapper[4605]: I1001 14:15:30.926416 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:15:30 crc kubenswrapper[4605]: E1001 14:15:30.926919 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:15:43 crc kubenswrapper[4605]: I1001 14:15:43.929980 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:15:43 crc kubenswrapper[4605]: E1001 14:15:43.931916 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:15:45 crc kubenswrapper[4605]: I1001 14:15:45.039693 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" podStartSLOduration=16.324565325000002 podStartE2EDuration="17.039662489s" podCreationTimestamp="2025-10-01 14:15:28 +0000 UTC" firstStartedPulling="2025-10-01 14:15:29.06571941 +0000 UTC m=+1851.809695618" lastFinishedPulling="2025-10-01 14:15:29.780816574 +0000 UTC m=+1852.524792782" observedRunningTime="2025-10-01 14:15:30.069754048 +0000 UTC m=+1852.813730256" watchObservedRunningTime="2025-10-01 14:15:45.039662489 +0000 UTC m=+1867.783638727" Oct 01 14:15:45 crc kubenswrapper[4605]: I1001 14:15:45.046041 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-g6sp8"] Oct 01 14:15:45 crc kubenswrapper[4605]: I1001 14:15:45.059224 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-g6sp8"] Oct 01 14:15:45 crc kubenswrapper[4605]: I1001 14:15:45.945886 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db269c11-d648-45e6-baa2-ffa53799b1d6" path="/var/lib/kubelet/pods/db269c11-d648-45e6-baa2-ffa53799b1d6/volumes" Oct 01 14:15:51 crc kubenswrapper[4605]: I1001 14:15:51.232296 4605 scope.go:117] "RemoveContainer" containerID="409e3f3177d2af1bb27ec05ffd965bc3ec8187272d7ca9d0028809254304f380" Oct 01 14:15:51 crc kubenswrapper[4605]: I1001 14:15:51.269785 4605 scope.go:117] "RemoveContainer" containerID="79d8c909cce78c5a584cf96f0aac7f210acfc1fdbcc47a9252a30626b2e2a6d2" Oct 01 14:15:51 crc kubenswrapper[4605]: I1001 14:15:51.349636 4605 scope.go:117] "RemoveContainer" containerID="9ba34209f48d3a15cd7dc1488e53fa06fddb53fd75a3123c9226f90490efabb3" Oct 01 14:15:57 crc kubenswrapper[4605]: I1001 14:15:57.941166 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:15:57 crc kubenswrapper[4605]: E1001 14:15:57.942311 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:16:12 crc kubenswrapper[4605]: I1001 14:16:12.927011 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:16:12 crc kubenswrapper[4605]: E1001 14:16:12.928368 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:16:24 crc kubenswrapper[4605]: I1001 14:16:24.927777 4605 scope.go:117] "RemoveContainer" 
containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:16:25 crc kubenswrapper[4605]: I1001 14:16:25.532533 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"f06cb3938ac439d54af13ffb12c4fd504d9a9e79baaf158e6ffa42cfea1ffc76"} Oct 01 14:16:28 crc kubenswrapper[4605]: I1001 14:16:28.564684 4605 generic.go:334] "Generic (PLEG): container finished" podID="84e4e275-5c28-4ea8-bf23-154b3aaa036d" containerID="03f8252cc662127a2494f3b3f4796a69d8dc0aaf26fe01ce131b06d651665e47" exitCode=2 Oct 01 14:16:28 crc kubenswrapper[4605]: I1001 14:16:28.564801 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" event={"ID":"84e4e275-5c28-4ea8-bf23-154b3aaa036d","Type":"ContainerDied","Data":"03f8252cc662127a2494f3b3f4796a69d8dc0aaf26fe01ce131b06d651665e47"} Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.076541 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.218508 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-ssh-key\") pod \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.219217 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vsgd\" (UniqueName: \"kubernetes.io/projected/84e4e275-5c28-4ea8-bf23-154b3aaa036d-kube-api-access-7vsgd\") pod \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.219702 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-inventory\") pod \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\" (UID: \"84e4e275-5c28-4ea8-bf23-154b3aaa036d\") " Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.227382 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84e4e275-5c28-4ea8-bf23-154b3aaa036d-kube-api-access-7vsgd" (OuterVolumeSpecName: "kube-api-access-7vsgd") pod "84e4e275-5c28-4ea8-bf23-154b3aaa036d" (UID: "84e4e275-5c28-4ea8-bf23-154b3aaa036d"). InnerVolumeSpecName "kube-api-access-7vsgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.248259 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-inventory" (OuterVolumeSpecName: "inventory") pod "84e4e275-5c28-4ea8-bf23-154b3aaa036d" (UID: "84e4e275-5c28-4ea8-bf23-154b3aaa036d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.249772 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "84e4e275-5c28-4ea8-bf23-154b3aaa036d" (UID: "84e4e275-5c28-4ea8-bf23-154b3aaa036d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.323168 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.323203 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vsgd\" (UniqueName: \"kubernetes.io/projected/84e4e275-5c28-4ea8-bf23-154b3aaa036d-kube-api-access-7vsgd\") on node \"crc\" DevicePath \"\"" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.323216 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84e4e275-5c28-4ea8-bf23-154b3aaa036d-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.590587 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" event={"ID":"84e4e275-5c28-4ea8-bf23-154b3aaa036d","Type":"ContainerDied","Data":"0ffadc61e0975ca8dd10021fccd49b0185675dcbd7737724b9351c8167632b60"} Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.590645 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ffadc61e0975ca8dd10021fccd49b0185675dcbd7737724b9351c8167632b60" Oct 01 14:16:30 crc kubenswrapper[4605]: I1001 14:16:30.590670 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.027415 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd"] Oct 01 14:16:37 crc kubenswrapper[4605]: E1001 14:16:37.028323 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e4e275-5c28-4ea8-bf23-154b3aaa036d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.028337 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e4e275-5c28-4ea8-bf23-154b3aaa036d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.028544 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="84e4e275-5c28-4ea8-bf23-154b3aaa036d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.029227 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.031801 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.032002 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.032138 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.032946 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.053461 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.053602 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.053717 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbgmf\" (UniqueName: \"kubernetes.io/projected/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-kube-api-access-jbgmf\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.091327 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd"] Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.155978 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.156130 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.156237 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbgmf\" (UniqueName: \"kubernetes.io/projected/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-kube-api-access-jbgmf\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" 
(UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.162732 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.163373 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.177889 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbgmf\" (UniqueName: \"kubernetes.io/projected/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-kube-api-access-jbgmf\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.356684 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.912890 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd"] Oct 01 14:16:37 crc kubenswrapper[4605]: I1001 14:16:37.914207 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:16:38 crc kubenswrapper[4605]: I1001 14:16:38.479822 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:16:38 crc kubenswrapper[4605]: I1001 14:16:38.677649 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" event={"ID":"283ccb8d-6321-440f-a0a6-f2118a4f9bf5","Type":"ContainerStarted","Data":"adc1c599393e0a40fa6bf7063cb6d0564e9bdf0eff2711d3ed06cba71ed593d0"} Oct 01 14:16:39 crc kubenswrapper[4605]: I1001 14:16:39.694773 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" event={"ID":"283ccb8d-6321-440f-a0a6-f2118a4f9bf5","Type":"ContainerStarted","Data":"da438d8e3ce095ec4cfbd5e566fb139c03114ba956ee57cbb2dc9f2b28beb511"} Oct 01 14:16:39 crc kubenswrapper[4605]: I1001 14:16:39.712188 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" podStartSLOduration=2.148409382 podStartE2EDuration="2.712174234s" podCreationTimestamp="2025-10-01 14:16:37 +0000 UTC" firstStartedPulling="2025-10-01 14:16:37.913689855 +0000 UTC m=+1920.657666063" lastFinishedPulling="2025-10-01 14:16:38.477454707 +0000 UTC m=+1921.221430915" observedRunningTime="2025-10-01 14:16:39.709891056 +0000 UTC m=+1922.453867264" watchObservedRunningTime="2025-10-01 14:16:39.712174234 +0000 UTC m=+1922.456150432" Oct 01 14:17:29 crc kubenswrapper[4605]: I1001 14:17:29.143145 4605 
generic.go:334] "Generic (PLEG): container finished" podID="283ccb8d-6321-440f-a0a6-f2118a4f9bf5" containerID="da438d8e3ce095ec4cfbd5e566fb139c03114ba956ee57cbb2dc9f2b28beb511" exitCode=0 Oct 01 14:17:29 crc kubenswrapper[4605]: I1001 14:17:29.143742 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" event={"ID":"283ccb8d-6321-440f-a0a6-f2118a4f9bf5","Type":"ContainerDied","Data":"da438d8e3ce095ec4cfbd5e566fb139c03114ba956ee57cbb2dc9f2b28beb511"} Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.573147 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.685489 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-ssh-key\") pod \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.685546 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbgmf\" (UniqueName: \"kubernetes.io/projected/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-kube-api-access-jbgmf\") pod \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.685625 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-inventory\") pod \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\" (UID: \"283ccb8d-6321-440f-a0a6-f2118a4f9bf5\") " Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.704551 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-kube-api-access-jbgmf" (OuterVolumeSpecName: "kube-api-access-jbgmf") pod "283ccb8d-6321-440f-a0a6-f2118a4f9bf5" (UID: "283ccb8d-6321-440f-a0a6-f2118a4f9bf5"). InnerVolumeSpecName "kube-api-access-jbgmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.712622 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "283ccb8d-6321-440f-a0a6-f2118a4f9bf5" (UID: "283ccb8d-6321-440f-a0a6-f2118a4f9bf5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.752734 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-inventory" (OuterVolumeSpecName: "inventory") pod "283ccb8d-6321-440f-a0a6-f2118a4f9bf5" (UID: "283ccb8d-6321-440f-a0a6-f2118a4f9bf5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.788154 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.788184 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:30 crc kubenswrapper[4605]: I1001 14:17:30.788194 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbgmf\" (UniqueName: \"kubernetes.io/projected/283ccb8d-6321-440f-a0a6-f2118a4f9bf5-kube-api-access-jbgmf\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.169679 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" event={"ID":"283ccb8d-6321-440f-a0a6-f2118a4f9bf5","Type":"ContainerDied","Data":"adc1c599393e0a40fa6bf7063cb6d0564e9bdf0eff2711d3ed06cba71ed593d0"} Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.169992 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adc1c599393e0a40fa6bf7063cb6d0564e9bdf0eff2711d3ed06cba71ed593d0" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.169737 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.252873 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-ql62n"] Oct 01 14:17:31 crc kubenswrapper[4605]: E1001 14:17:31.253346 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283ccb8d-6321-440f-a0a6-f2118a4f9bf5" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.253371 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="283ccb8d-6321-440f-a0a6-f2118a4f9bf5" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.253594 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="283ccb8d-6321-440f-a0a6-f2118a4f9bf5" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.254464 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.257477 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.259487 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.259841 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.260966 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.265081 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-ql62n"] Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.399125 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.399220 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-552nx\" (UniqueName: \"kubernetes.io/projected/375a2afa-b804-4227-8137-eb7c5c56d8fb-kube-api-access-552nx\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.399306 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.500451 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.500537 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-552nx\" (UniqueName: \"kubernetes.io/projected/375a2afa-b804-4227-8137-eb7c5c56d8fb-kube-api-access-552nx\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.500604 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc 
kubenswrapper[4605]: I1001 14:17:31.504867 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.512643 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.522071 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-552nx\" (UniqueName: \"kubernetes.io/projected/375a2afa-b804-4227-8137-eb7c5c56d8fb-kube-api-access-552nx\") pod \"ssh-known-hosts-edpm-deployment-ql62n\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:31 crc kubenswrapper[4605]: I1001 14:17:31.573490 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:32 crc kubenswrapper[4605]: I1001 14:17:32.102265 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-ql62n"] Oct 01 14:17:32 crc kubenswrapper[4605]: I1001 14:17:32.180182 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" event={"ID":"375a2afa-b804-4227-8137-eb7c5c56d8fb","Type":"ContainerStarted","Data":"1855d1f00fbdc5d0e6bcc101b823b0b66a1a37a0ba7b3012e61daa94aa3786f5"} Oct 01 14:17:33 crc kubenswrapper[4605]: I1001 14:17:33.193256 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" event={"ID":"375a2afa-b804-4227-8137-eb7c5c56d8fb","Type":"ContainerStarted","Data":"d055703d6f2507de6453cdc005e0ad751c07187b34310ef4b62f5b99280c7657"} Oct 01 14:17:33 crc kubenswrapper[4605]: I1001 14:17:33.212509 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" podStartSLOduration=1.535112987 podStartE2EDuration="2.212485684s" podCreationTimestamp="2025-10-01 14:17:31 +0000 UTC" firstStartedPulling="2025-10-01 14:17:32.112410956 +0000 UTC m=+1974.856387204" lastFinishedPulling="2025-10-01 14:17:32.789783653 +0000 UTC m=+1975.533759901" observedRunningTime="2025-10-01 14:17:33.211368086 +0000 UTC m=+1975.955344294" watchObservedRunningTime="2025-10-01 14:17:33.212485684 +0000 UTC m=+1975.956461912" Oct 01 14:17:40 crc kubenswrapper[4605]: I1001 14:17:40.252226 4605 generic.go:334] "Generic (PLEG): container finished" podID="375a2afa-b804-4227-8137-eb7c5c56d8fb" containerID="d055703d6f2507de6453cdc005e0ad751c07187b34310ef4b62f5b99280c7657" exitCode=0 Oct 01 14:17:40 crc kubenswrapper[4605]: I1001 14:17:40.252408 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" event={"ID":"375a2afa-b804-4227-8137-eb7c5c56d8fb","Type":"ContainerDied","Data":"d055703d6f2507de6453cdc005e0ad751c07187b34310ef4b62f5b99280c7657"} Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.663798 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.793937 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-ssh-key-openstack-edpm-ipam\") pod \"375a2afa-b804-4227-8137-eb7c5c56d8fb\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.794063 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-inventory-0\") pod \"375a2afa-b804-4227-8137-eb7c5c56d8fb\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.794215 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-552nx\" (UniqueName: \"kubernetes.io/projected/375a2afa-b804-4227-8137-eb7c5c56d8fb-kube-api-access-552nx\") pod \"375a2afa-b804-4227-8137-eb7c5c56d8fb\" (UID: \"375a2afa-b804-4227-8137-eb7c5c56d8fb\") " Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.800626 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/375a2afa-b804-4227-8137-eb7c5c56d8fb-kube-api-access-552nx" (OuterVolumeSpecName: "kube-api-access-552nx") pod "375a2afa-b804-4227-8137-eb7c5c56d8fb" (UID: "375a2afa-b804-4227-8137-eb7c5c56d8fb"). InnerVolumeSpecName "kube-api-access-552nx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.825996 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "375a2afa-b804-4227-8137-eb7c5c56d8fb" (UID: "375a2afa-b804-4227-8137-eb7c5c56d8fb"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.830549 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "375a2afa-b804-4227-8137-eb7c5c56d8fb" (UID: "375a2afa-b804-4227-8137-eb7c5c56d8fb"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.896734 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.896779 4605 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/375a2afa-b804-4227-8137-eb7c5c56d8fb-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:41 crc kubenswrapper[4605]: I1001 14:17:41.896792 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-552nx\" (UniqueName: \"kubernetes.io/projected/375a2afa-b804-4227-8137-eb7c5c56d8fb-kube-api-access-552nx\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.271216 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" event={"ID":"375a2afa-b804-4227-8137-eb7c5c56d8fb","Type":"ContainerDied","Data":"1855d1f00fbdc5d0e6bcc101b823b0b66a1a37a0ba7b3012e61daa94aa3786f5"} Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.271256 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1855d1f00fbdc5d0e6bcc101b823b0b66a1a37a0ba7b3012e61daa94aa3786f5" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.271283 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-ql62n" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.357568 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw"] Oct 01 14:17:42 crc kubenswrapper[4605]: E1001 14:17:42.358241 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375a2afa-b804-4227-8137-eb7c5c56d8fb" containerName="ssh-known-hosts-edpm-deployment" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.358322 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="375a2afa-b804-4227-8137-eb7c5c56d8fb" containerName="ssh-known-hosts-edpm-deployment" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.358548 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="375a2afa-b804-4227-8137-eb7c5c56d8fb" containerName="ssh-known-hosts-edpm-deployment" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.359244 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.362111 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.362260 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.363013 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.363601 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.408812 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw"] Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.513466 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.513591 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj4g2\" (UniqueName: \"kubernetes.io/projected/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-kube-api-access-pj4g2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.513676 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.615966 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.616087 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.616201 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj4g2\" (UniqueName: \"kubernetes.io/projected/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-kube-api-access-pj4g2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.634609 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.634733 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.636408 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj4g2\" (UniqueName: \"kubernetes.io/projected/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-kube-api-access-pj4g2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-5lfmw\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:42 crc kubenswrapper[4605]: I1001 14:17:42.677223 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:43 crc kubenswrapper[4605]: I1001 14:17:43.095598 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw"] Oct 01 14:17:43 crc kubenswrapper[4605]: I1001 14:17:43.278874 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" event={"ID":"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff","Type":"ContainerStarted","Data":"a9fa40e64cd59c5fd98a80ef56e0c4ab7bddd4e2694499b79ecfdc2bbca0a00c"} Oct 01 14:17:44 crc kubenswrapper[4605]: I1001 14:17:44.289794 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" event={"ID":"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff","Type":"ContainerStarted","Data":"2de964f3a24155cc35e994a5877dffbd785e3c79f311d2486a19069bdbbe1c78"} Oct 01 14:17:44 crc kubenswrapper[4605]: I1001 14:17:44.319612 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" podStartSLOduration=1.725157359 podStartE2EDuration="2.319560205s" podCreationTimestamp="2025-10-01 14:17:42 +0000 UTC" firstStartedPulling="2025-10-01 14:17:43.081069793 +0000 UTC m=+1985.825046001" lastFinishedPulling="2025-10-01 14:17:43.675472639 +0000 UTC m=+1986.419448847" observedRunningTime="2025-10-01 14:17:44.318505248 +0000 UTC m=+1987.062481456" watchObservedRunningTime="2025-10-01 14:17:44.319560205 +0000 UTC m=+1987.063536453" Oct 01 14:17:53 crc kubenswrapper[4605]: I1001 14:17:53.374544 4605 generic.go:334] "Generic (PLEG): container finished" podID="0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" containerID="2de964f3a24155cc35e994a5877dffbd785e3c79f311d2486a19069bdbbe1c78" exitCode=0 Oct 01 14:17:53 crc kubenswrapper[4605]: I1001 14:17:53.374639 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" 
event={"ID":"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff","Type":"ContainerDied","Data":"2de964f3a24155cc35e994a5877dffbd785e3c79f311d2486a19069bdbbe1c78"} Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.882047 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.919546 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-inventory\") pod \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.919589 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-ssh-key\") pod \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.919635 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj4g2\" (UniqueName: \"kubernetes.io/projected/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-kube-api-access-pj4g2\") pod \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\" (UID: \"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff\") " Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.928393 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-kube-api-access-pj4g2" (OuterVolumeSpecName: "kube-api-access-pj4g2") pod "0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" (UID: "0ce2e0e7-1cfa-4fcb-87d3-214503a56fff"). InnerVolumeSpecName "kube-api-access-pj4g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.950725 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-inventory" (OuterVolumeSpecName: "inventory") pod "0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" (UID: "0ce2e0e7-1cfa-4fcb-87d3-214503a56fff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:17:54 crc kubenswrapper[4605]: I1001 14:17:54.962247 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" (UID: "0ce2e0e7-1cfa-4fcb-87d3-214503a56fff"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.022147 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.022184 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.022197 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj4g2\" (UniqueName: \"kubernetes.io/projected/0ce2e0e7-1cfa-4fcb-87d3-214503a56fff-kube-api-access-pj4g2\") on node \"crc\" DevicePath \"\"" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.398754 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" event={"ID":"0ce2e0e7-1cfa-4fcb-87d3-214503a56fff","Type":"ContainerDied","Data":"a9fa40e64cd59c5fd98a80ef56e0c4ab7bddd4e2694499b79ecfdc2bbca0a00c"} Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.399012 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9fa40e64cd59c5fd98a80ef56e0c4ab7bddd4e2694499b79ecfdc2bbca0a00c" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.398964 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-5lfmw" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.513013 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b"] Oct 01 14:17:55 crc kubenswrapper[4605]: E1001 14:17:55.514043 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.514075 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.514279 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ce2e0e7-1cfa-4fcb-87d3-214503a56fff" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.515744 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.518562 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.518836 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.522148 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.524308 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.542319 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbm2d\" (UniqueName: \"kubernetes.io/projected/0f112a96-c395-4ae9-8960-596266eb98b0-kube-api-access-nbm2d\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.542417 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.542549 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.562270 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b"] Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.644027 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbm2d\" (UniqueName: \"kubernetes.io/projected/0f112a96-c395-4ae9-8960-596266eb98b0-kube-api-access-nbm2d\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.644075 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.644129 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: 
\"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.659800 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.665673 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.680629 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbm2d\" (UniqueName: \"kubernetes.io/projected/0f112a96-c395-4ae9-8960-596266eb98b0-kube-api-access-nbm2d\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:55 crc kubenswrapper[4605]: I1001 14:17:55.840233 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:17:56 crc kubenswrapper[4605]: I1001 14:17:56.367544 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b"] Oct 01 14:17:56 crc kubenswrapper[4605]: I1001 14:17:56.416322 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" event={"ID":"0f112a96-c395-4ae9-8960-596266eb98b0","Type":"ContainerStarted","Data":"6d4899db04050ddae307e45f2557ca7652391ea196b01cd0dad1abfc26bab586"} Oct 01 14:17:59 crc kubenswrapper[4605]: I1001 14:17:59.450863 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" event={"ID":"0f112a96-c395-4ae9-8960-596266eb98b0","Type":"ContainerStarted","Data":"99bb6ff85da88c878523a7f8f14c69f3b64aaaf1c5a1641a7636801ea030fa95"} Oct 01 14:17:59 crc kubenswrapper[4605]: I1001 14:17:59.474687 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" podStartSLOduration=2.8039799690000002 podStartE2EDuration="4.474663663s" podCreationTimestamp="2025-10-01 14:17:55 +0000 UTC" firstStartedPulling="2025-10-01 14:17:56.362720333 +0000 UTC m=+1999.106696541" lastFinishedPulling="2025-10-01 14:17:58.033404007 +0000 UTC m=+2000.777380235" observedRunningTime="2025-10-01 14:17:59.467353438 +0000 UTC m=+2002.211329656" watchObservedRunningTime="2025-10-01 14:17:59.474663663 +0000 UTC m=+2002.218639871" Oct 01 14:18:08 crc kubenswrapper[4605]: I1001 14:18:08.529809 4605 generic.go:334] "Generic (PLEG): container finished" podID="0f112a96-c395-4ae9-8960-596266eb98b0" containerID="99bb6ff85da88c878523a7f8f14c69f3b64aaaf1c5a1641a7636801ea030fa95" exitCode=0 Oct 01 14:18:08 crc kubenswrapper[4605]: I1001 14:18:08.530341 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" 
event={"ID":"0f112a96-c395-4ae9-8960-596266eb98b0","Type":"ContainerDied","Data":"99bb6ff85da88c878523a7f8f14c69f3b64aaaf1c5a1641a7636801ea030fa95"} Oct 01 14:18:09 crc kubenswrapper[4605]: I1001 14:18:09.912441 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.025622 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-inventory\") pod \"0f112a96-c395-4ae9-8960-596266eb98b0\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.025702 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-ssh-key\") pod \"0f112a96-c395-4ae9-8960-596266eb98b0\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.025731 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbm2d\" (UniqueName: \"kubernetes.io/projected/0f112a96-c395-4ae9-8960-596266eb98b0-kube-api-access-nbm2d\") pod \"0f112a96-c395-4ae9-8960-596266eb98b0\" (UID: \"0f112a96-c395-4ae9-8960-596266eb98b0\") " Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.034299 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f112a96-c395-4ae9-8960-596266eb98b0-kube-api-access-nbm2d" (OuterVolumeSpecName: "kube-api-access-nbm2d") pod "0f112a96-c395-4ae9-8960-596266eb98b0" (UID: "0f112a96-c395-4ae9-8960-596266eb98b0"). InnerVolumeSpecName "kube-api-access-nbm2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.058727 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0f112a96-c395-4ae9-8960-596266eb98b0" (UID: "0f112a96-c395-4ae9-8960-596266eb98b0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.058747 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-inventory" (OuterVolumeSpecName: "inventory") pod "0f112a96-c395-4ae9-8960-596266eb98b0" (UID: "0f112a96-c395-4ae9-8960-596266eb98b0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.130485 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.130528 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f112a96-c395-4ae9-8960-596266eb98b0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.130543 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbm2d\" (UniqueName: \"kubernetes.io/projected/0f112a96-c395-4ae9-8960-596266eb98b0-kube-api-access-nbm2d\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.545772 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" event={"ID":"0f112a96-c395-4ae9-8960-596266eb98b0","Type":"ContainerDied","Data":"6d4899db04050ddae307e45f2557ca7652391ea196b01cd0dad1abfc26bab586"} Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.546480 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d4899db04050ddae307e45f2557ca7652391ea196b01cd0dad1abfc26bab586" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.545806 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.636589 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp"] Oct 01 14:18:10 crc kubenswrapper[4605]: E1001 14:18:10.637231 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f112a96-c395-4ae9-8960-596266eb98b0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.637321 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f112a96-c395-4ae9-8960-596266eb98b0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.637638 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f112a96-c395-4ae9-8960-596266eb98b0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.638568 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640159 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640205 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640254 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640280 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ktt6\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-kube-api-access-6ktt6\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640304 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640331 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640447 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc 
kubenswrapper[4605]: I1001 14:18:10.640530 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640593 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640625 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640675 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640742 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640786 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.640819 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644337 4605 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644389 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644431 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644396 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644520 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644342 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644695 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.644735 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.658495 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp"] Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.743454 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.743587 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744388 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744450 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744477 
4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744503 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744529 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744589 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744622 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744691 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744726 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ktt6\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-kube-api-access-6ktt6\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744756 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744784 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.744825 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.748607 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.748915 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.749493 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.750521 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.751758 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.751900 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.752251 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.752724 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.753071 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.753644 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.753578 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.754476 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.755786 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.764399 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ktt6\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-kube-api-access-6ktt6\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:10 crc kubenswrapper[4605]: I1001 14:18:10.955168 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:11 crc kubenswrapper[4605]: I1001 14:18:11.546230 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp"] Oct 01 14:18:11 crc kubenswrapper[4605]: I1001 14:18:11.564004 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" event={"ID":"3187f065-743c-4531-93b0-12c666bdd4c3","Type":"ContainerStarted","Data":"8ed5c22bf1ccce48dd3fe8c01d0af59c39028ea20847d3e61d33f9bbe4e8e11c"} Oct 01 14:18:12 crc kubenswrapper[4605]: I1001 14:18:12.573825 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" event={"ID":"3187f065-743c-4531-93b0-12c666bdd4c3","Type":"ContainerStarted","Data":"8ec9882f7236258415c60e2af9f390cc71d06622b6f26af1b7c7ebc02f6f9b66"} Oct 01 14:18:12 crc kubenswrapper[4605]: I1001 14:18:12.602032 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" podStartSLOduration=1.955915217 podStartE2EDuration="2.602006342s" podCreationTimestamp="2025-10-01 14:18:10 +0000 UTC" firstStartedPulling="2025-10-01 14:18:11.554488454 +0000 UTC m=+2014.298464662" lastFinishedPulling="2025-10-01 14:18:12.200579569 +0000 UTC m=+2014.944555787" observedRunningTime="2025-10-01 14:18:12.595930478 +0000 UTC m=+2015.339906706" watchObservedRunningTime="2025-10-01 14:18:12.602006342 +0000 UTC m=+2015.345982550" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.529731 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c4rn8"] Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.532006 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.549018 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c4rn8"] Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.717543 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-catalog-content\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.717876 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdcz8\" (UniqueName: \"kubernetes.io/projected/0d0d62d6-622a-424d-b787-dd0ba5ea6789-kube-api-access-pdcz8\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.717899 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-utilities\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.819359 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-catalog-content\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.819410 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-utilities\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.819426 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdcz8\" (UniqueName: \"kubernetes.io/projected/0d0d62d6-622a-424d-b787-dd0ba5ea6789-kube-api-access-pdcz8\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.820252 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-catalog-content\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.820465 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-utilities\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.839903 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pdcz8\" (UniqueName: \"kubernetes.io/projected/0d0d62d6-622a-424d-b787-dd0ba5ea6789-kube-api-access-pdcz8\") pod \"redhat-operators-c4rn8\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:40 crc kubenswrapper[4605]: I1001 14:18:40.850665 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:41 crc kubenswrapper[4605]: I1001 14:18:41.335580 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c4rn8"] Oct 01 14:18:41 crc kubenswrapper[4605]: I1001 14:18:41.826942 4605 generic.go:334] "Generic (PLEG): container finished" podID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerID="5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556" exitCode=0 Oct 01 14:18:41 crc kubenswrapper[4605]: I1001 14:18:41.827244 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerDied","Data":"5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556"} Oct 01 14:18:41 crc kubenswrapper[4605]: I1001 14:18:41.827314 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerStarted","Data":"9f8b0082406f6375287861eb17a92beba94d57d7263fc7211285d33031ca9cda"} Oct 01 14:18:42 crc kubenswrapper[4605]: I1001 14:18:42.838889 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerStarted","Data":"3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb"} Oct 01 14:18:46 crc kubenswrapper[4605]: I1001 14:18:46.881726 4605 generic.go:334] "Generic (PLEG): container finished" podID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerID="3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb" exitCode=0 Oct 01 14:18:46 crc kubenswrapper[4605]: I1001 14:18:46.881815 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerDied","Data":"3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb"} Oct 01 14:18:47 crc kubenswrapper[4605]: I1001 14:18:47.895813 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerStarted","Data":"c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b"} Oct 01 14:18:47 crc kubenswrapper[4605]: I1001 14:18:47.931426 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c4rn8" podStartSLOduration=2.129404182 podStartE2EDuration="7.931054104s" podCreationTimestamp="2025-10-01 14:18:40 +0000 UTC" firstStartedPulling="2025-10-01 14:18:41.828555307 +0000 UTC m=+2044.572531515" lastFinishedPulling="2025-10-01 14:18:47.630205229 +0000 UTC m=+2050.374181437" observedRunningTime="2025-10-01 14:18:47.915655985 +0000 UTC m=+2050.659632203" watchObservedRunningTime="2025-10-01 14:18:47.931054104 +0000 UTC m=+2050.675030332" Oct 01 14:18:50 crc kubenswrapper[4605]: I1001 14:18:50.851722 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c4rn8" 
Oct 01 14:18:50 crc kubenswrapper[4605]: I1001 14:18:50.852246 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:18:51 crc kubenswrapper[4605]: I1001 14:18:51.630719 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:18:51 crc kubenswrapper[4605]: I1001 14:18:51.630963 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:18:51 crc kubenswrapper[4605]: I1001 14:18:51.902329 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c4rn8" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="registry-server" probeResult="failure" output=< Oct 01 14:18:51 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:18:51 crc kubenswrapper[4605]: > Oct 01 14:18:53 crc kubenswrapper[4605]: I1001 14:18:53.951737 4605 generic.go:334] "Generic (PLEG): container finished" podID="3187f065-743c-4531-93b0-12c666bdd4c3" containerID="8ec9882f7236258415c60e2af9f390cc71d06622b6f26af1b7c7ebc02f6f9b66" exitCode=0 Oct 01 14:18:53 crc kubenswrapper[4605]: I1001 14:18:53.951851 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" event={"ID":"3187f065-743c-4531-93b0-12c666bdd4c3","Type":"ContainerDied","Data":"8ec9882f7236258415c60e2af9f390cc71d06622b6f26af1b7c7ebc02f6f9b66"} Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.393478 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495661 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-neutron-metadata-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495708 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-telemetry-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495750 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-nova-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495786 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ktt6\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-kube-api-access-6ktt6\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495880 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ssh-key\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495919 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-repo-setup-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.495965 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.496021 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ovn-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.496071 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 
crc kubenswrapper[4605]: I1001 14:18:55.496116 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-inventory\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.496140 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.496163 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-bootstrap-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.496196 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.496259 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-libvirt-combined-ca-bundle\") pod \"3187f065-743c-4531-93b0-12c666bdd4c3\" (UID: \"3187f065-743c-4531-93b0-12c666bdd4c3\") " Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.501331 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.502224 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.503050 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.503288 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.503396 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.504888 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.506677 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.507444 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.507842 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.508730 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.509577 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-kube-api-access-6ktt6" (OuterVolumeSpecName: "kube-api-access-6ktt6") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "kube-api-access-6ktt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.519339 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.533455 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-inventory" (OuterVolumeSpecName: "inventory") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.544219 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3187f065-743c-4531-93b0-12c666bdd4c3" (UID: "3187f065-743c-4531-93b0-12c666bdd4c3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599342 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599390 4605 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599408 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599424 4605 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599442 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599460 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-inventory\") on node 
\"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599472 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599486 4605 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599501 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599513 4605 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599525 4605 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599542 4605 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599554 4605 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3187f065-743c-4531-93b0-12c666bdd4c3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.599565 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ktt6\" (UniqueName: \"kubernetes.io/projected/3187f065-743c-4531-93b0-12c666bdd4c3-kube-api-access-6ktt6\") on node \"crc\" DevicePath \"\"" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.975968 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" event={"ID":"3187f065-743c-4531-93b0-12c666bdd4c3","Type":"ContainerDied","Data":"8ed5c22bf1ccce48dd3fe8c01d0af59c39028ea20847d3e61d33f9bbe4e8e11c"} Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.976003 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ed5c22bf1ccce48dd3fe8c01d0af59c39028ea20847d3e61d33f9bbe4e8e11c" Oct 01 14:18:55 crc kubenswrapper[4605]: I1001 14:18:55.976017 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.093884 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69"] Oct 01 14:18:56 crc kubenswrapper[4605]: E1001 14:18:56.094397 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3187f065-743c-4531-93b0-12c666bdd4c3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.094416 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="3187f065-743c-4531-93b0-12c666bdd4c3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.094613 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="3187f065-743c-4531-93b0-12c666bdd4c3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.095442 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.099635 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.100218 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.100415 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.101774 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.102448 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.104037 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69"] Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.211764 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.211853 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.211890 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vwql\" (UniqueName: \"kubernetes.io/projected/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-kube-api-access-8vwql\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.211965 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.211992 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.314074 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.314155 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vwql\" (UniqueName: \"kubernetes.io/projected/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-kube-api-access-8vwql\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.314216 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.314266 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.314364 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.315140 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 
14:18:56.320666 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.320868 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.321280 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.329447 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vwql\" (UniqueName: \"kubernetes.io/projected/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-kube-api-access-8vwql\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-x2z69\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:56 crc kubenswrapper[4605]: I1001 14:18:56.412715 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:18:57 crc kubenswrapper[4605]: I1001 14:18:57.024135 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69"] Oct 01 14:18:57 crc kubenswrapper[4605]: I1001 14:18:57.994548 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" event={"ID":"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9","Type":"ContainerStarted","Data":"8ae7b1afbde75649784411dfe006c52b78b2fb35c4d77e11faf007da6b895911"} Oct 01 14:18:57 crc kubenswrapper[4605]: I1001 14:18:57.995106 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" event={"ID":"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9","Type":"ContainerStarted","Data":"12ebbc93c9e4fbc52778c26dd3f7675685251cc7d91558aa3326d6eb8a7235a4"} Oct 01 14:18:58 crc kubenswrapper[4605]: I1001 14:18:58.015087 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" podStartSLOduration=1.594037798 podStartE2EDuration="2.015065977s" podCreationTimestamp="2025-10-01 14:18:56 +0000 UTC" firstStartedPulling="2025-10-01 14:18:57.039617553 +0000 UTC m=+2059.783593761" lastFinishedPulling="2025-10-01 14:18:57.460645732 +0000 UTC m=+2060.204621940" observedRunningTime="2025-10-01 14:18:58.010580864 +0000 UTC m=+2060.754557072" watchObservedRunningTime="2025-10-01 14:18:58.015065977 +0000 UTC m=+2060.759042185" Oct 01 14:19:01 crc kubenswrapper[4605]: I1001 14:19:01.905530 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c4rn8" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="registry-server" 
probeResult="failure" output=< Oct 01 14:19:01 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:19:01 crc kubenswrapper[4605]: > Oct 01 14:19:10 crc kubenswrapper[4605]: I1001 14:19:10.891849 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:19:10 crc kubenswrapper[4605]: I1001 14:19:10.965442 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:19:11 crc kubenswrapper[4605]: I1001 14:19:11.733183 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c4rn8"] Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.100711 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c4rn8" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="registry-server" containerID="cri-o://c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b" gracePeriod=2 Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.510217 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.612128 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-catalog-content\") pod \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.612326 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-utilities\") pod \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.612478 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdcz8\" (UniqueName: \"kubernetes.io/projected/0d0d62d6-622a-424d-b787-dd0ba5ea6789-kube-api-access-pdcz8\") pod \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\" (UID: \"0d0d62d6-622a-424d-b787-dd0ba5ea6789\") " Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.613192 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-utilities" (OuterVolumeSpecName: "utilities") pod "0d0d62d6-622a-424d-b787-dd0ba5ea6789" (UID: "0d0d62d6-622a-424d-b787-dd0ba5ea6789"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.617872 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d62d6-622a-424d-b787-dd0ba5ea6789-kube-api-access-pdcz8" (OuterVolumeSpecName: "kube-api-access-pdcz8") pod "0d0d62d6-622a-424d-b787-dd0ba5ea6789" (UID: "0d0d62d6-622a-424d-b787-dd0ba5ea6789"). InnerVolumeSpecName "kube-api-access-pdcz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.712649 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d0d62d6-622a-424d-b787-dd0ba5ea6789" (UID: "0d0d62d6-622a-424d-b787-dd0ba5ea6789"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.715247 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.715378 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0d62d6-622a-424d-b787-dd0ba5ea6789-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:19:12 crc kubenswrapper[4605]: I1001 14:19:12.715448 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdcz8\" (UniqueName: \"kubernetes.io/projected/0d0d62d6-622a-424d-b787-dd0ba5ea6789-kube-api-access-pdcz8\") on node \"crc\" DevicePath \"\"" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.109476 4605 generic.go:334] "Generic (PLEG): container finished" podID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerID="c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b" exitCode=0 Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.109516 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerDied","Data":"c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b"} Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.109542 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c4rn8" event={"ID":"0d0d62d6-622a-424d-b787-dd0ba5ea6789","Type":"ContainerDied","Data":"9f8b0082406f6375287861eb17a92beba94d57d7263fc7211285d33031ca9cda"} Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.109559 4605 scope.go:117] "RemoveContainer" containerID="c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.109578 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c4rn8" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.130312 4605 scope.go:117] "RemoveContainer" containerID="3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.146391 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c4rn8"] Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.158101 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c4rn8"] Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.165265 4605 scope.go:117] "RemoveContainer" containerID="5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.201190 4605 scope.go:117] "RemoveContainer" containerID="c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b" Oct 01 14:19:13 crc kubenswrapper[4605]: E1001 14:19:13.201534 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b\": container with ID starting with c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b not found: ID does not exist" containerID="c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.201577 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b"} err="failed to get container status \"c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b\": rpc error: code = NotFound desc = could not find container \"c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b\": container with ID starting with c8ba7c58fb8aec86c8b6de48c837abf3b5d54c4bda84ee3028af07deea14599b not found: ID does not exist" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.201603 4605 scope.go:117] "RemoveContainer" containerID="3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb" Oct 01 14:19:13 crc kubenswrapper[4605]: E1001 14:19:13.202009 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb\": container with ID starting with 3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb not found: ID does not exist" containerID="3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.202036 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb"} err="failed to get container status \"3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb\": rpc error: code = NotFound desc = could not find container \"3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb\": container with ID starting with 3ae2d2c5810ba783dcb9d1d4f1ac01167545717a5cdb5f636f97c4605fd38fcb not found: ID does not exist" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.202053 4605 scope.go:117] "RemoveContainer" containerID="5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556" Oct 01 14:19:13 crc kubenswrapper[4605]: E1001 14:19:13.202354 4605 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556\": container with ID starting with 5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556 not found: ID does not exist" containerID="5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.202400 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556"} err="failed to get container status \"5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556\": rpc error: code = NotFound desc = could not find container \"5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556\": container with ID starting with 5d755181a1844893eb95bbfd12410d09313debf2ec52f644ffe919eb748d1556 not found: ID does not exist" Oct 01 14:19:13 crc kubenswrapper[4605]: I1001 14:19:13.941395 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" path="/var/lib/kubelet/pods/0d0d62d6-622a-424d-b787-dd0ba5ea6789/volumes" Oct 01 14:19:21 crc kubenswrapper[4605]: I1001 14:19:21.630978 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:19:21 crc kubenswrapper[4605]: I1001 14:19:21.631667 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.018474 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9xhg7"] Oct 01 14:19:44 crc kubenswrapper[4605]: E1001 14:19:44.019337 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="extract-utilities" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.019352 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="extract-utilities" Oct 01 14:19:44 crc kubenswrapper[4605]: E1001 14:19:44.019366 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="extract-content" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.019371 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="extract-content" Oct 01 14:19:44 crc kubenswrapper[4605]: E1001 14:19:44.019388 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="registry-server" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.019394 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="registry-server" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.019591 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0d62d6-622a-424d-b787-dd0ba5ea6789" containerName="registry-server" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 
14:19:44.020873 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.043733 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xhg7"] Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.212539 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-utilities\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.212923 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl929\" (UniqueName: \"kubernetes.io/projected/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-kube-api-access-xl929\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.212972 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-catalog-content\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.314895 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-utilities\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.314968 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl929\" (UniqueName: \"kubernetes.io/projected/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-kube-api-access-xl929\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.315026 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-catalog-content\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.315550 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-catalog-content\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.315727 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-utilities\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 
14:19:44.341280 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl929\" (UniqueName: \"kubernetes.io/projected/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-kube-api-access-xl929\") pod \"redhat-marketplace-9xhg7\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:44 crc kubenswrapper[4605]: I1001 14:19:44.638318 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:45 crc kubenswrapper[4605]: I1001 14:19:45.103941 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xhg7"] Oct 01 14:19:45 crc kubenswrapper[4605]: I1001 14:19:45.401268 4605 generic.go:334] "Generic (PLEG): container finished" podID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerID="cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0" exitCode=0 Oct 01 14:19:45 crc kubenswrapper[4605]: I1001 14:19:45.401367 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerDied","Data":"cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0"} Oct 01 14:19:45 crc kubenswrapper[4605]: I1001 14:19:45.401606 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerStarted","Data":"c2c6fd1d5b24b1cc7f9839c6d64ef676e7f4ce91e530c73ea764443201b4b88b"} Oct 01 14:19:46 crc kubenswrapper[4605]: I1001 14:19:46.409980 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerStarted","Data":"e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b"} Oct 01 14:19:47 crc kubenswrapper[4605]: I1001 14:19:47.418790 4605 generic.go:334] "Generic (PLEG): container finished" podID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerID="e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b" exitCode=0 Oct 01 14:19:47 crc kubenswrapper[4605]: I1001 14:19:47.418883 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerDied","Data":"e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b"} Oct 01 14:19:48 crc kubenswrapper[4605]: I1001 14:19:48.453876 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerStarted","Data":"ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364"} Oct 01 14:19:48 crc kubenswrapper[4605]: I1001 14:19:48.478046 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9xhg7" podStartSLOduration=3.046372043 podStartE2EDuration="5.478021522s" podCreationTimestamp="2025-10-01 14:19:43 +0000 UTC" firstStartedPulling="2025-10-01 14:19:45.402858542 +0000 UTC m=+2108.146834750" lastFinishedPulling="2025-10-01 14:19:47.834508021 +0000 UTC m=+2110.578484229" observedRunningTime="2025-10-01 14:19:48.472896123 +0000 UTC m=+2111.216872321" watchObservedRunningTime="2025-10-01 14:19:48.478021522 +0000 UTC m=+2111.221997740" Oct 01 14:19:51 crc kubenswrapper[4605]: I1001 14:19:51.643854 4605 
patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:19:51 crc kubenswrapper[4605]: I1001 14:19:51.644207 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:19:51 crc kubenswrapper[4605]: I1001 14:19:51.644262 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:19:51 crc kubenswrapper[4605]: I1001 14:19:51.645481 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f06cb3938ac439d54af13ffb12c4fd504d9a9e79baaf158e6ffa42cfea1ffc76"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:19:51 crc kubenswrapper[4605]: I1001 14:19:51.645589 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://f06cb3938ac439d54af13ffb12c4fd504d9a9e79baaf158e6ffa42cfea1ffc76" gracePeriod=600 Oct 01 14:19:52 crc kubenswrapper[4605]: I1001 14:19:52.489893 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="f06cb3938ac439d54af13ffb12c4fd504d9a9e79baaf158e6ffa42cfea1ffc76" exitCode=0 Oct 01 14:19:52 crc kubenswrapper[4605]: I1001 14:19:52.490419 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"f06cb3938ac439d54af13ffb12c4fd504d9a9e79baaf158e6ffa42cfea1ffc76"} Oct 01 14:19:52 crc kubenswrapper[4605]: I1001 14:19:52.490447 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e"} Oct 01 14:19:52 crc kubenswrapper[4605]: I1001 14:19:52.490463 4605 scope.go:117] "RemoveContainer" containerID="3c03d2675f439d8ac4c6a292ee6900d11d4b18b69f30da560ed0997de5eb5196" Oct 01 14:19:54 crc kubenswrapper[4605]: I1001 14:19:54.639134 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:54 crc kubenswrapper[4605]: I1001 14:19:54.641291 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:54 crc kubenswrapper[4605]: I1001 14:19:54.692022 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:55 crc kubenswrapper[4605]: I1001 14:19:55.569610 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:55 crc kubenswrapper[4605]: I1001 14:19:55.614316 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xhg7"] Oct 01 14:19:57 crc kubenswrapper[4605]: I1001 14:19:57.547843 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9xhg7" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="registry-server" containerID="cri-o://ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364" gracePeriod=2 Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.003887 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.093481 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-utilities\") pod \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.093851 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-catalog-content\") pod \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.093924 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl929\" (UniqueName: \"kubernetes.io/projected/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-kube-api-access-xl929\") pod \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\" (UID: \"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf\") " Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.094348 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-utilities" (OuterVolumeSpecName: "utilities") pod "c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" (UID: "c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.094542 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.099925 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-kube-api-access-xl929" (OuterVolumeSpecName: "kube-api-access-xl929") pod "c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" (UID: "c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf"). InnerVolumeSpecName "kube-api-access-xl929". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.117126 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" (UID: "c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.196216 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.196253 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl929\" (UniqueName: \"kubernetes.io/projected/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf-kube-api-access-xl929\") on node \"crc\" DevicePath \"\"" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.559685 4605 generic.go:334] "Generic (PLEG): container finished" podID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerID="ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364" exitCode=0 Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.559723 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerDied","Data":"ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364"} Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.559740 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xhg7" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.559749 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xhg7" event={"ID":"c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf","Type":"ContainerDied","Data":"c2c6fd1d5b24b1cc7f9839c6d64ef676e7f4ce91e530c73ea764443201b4b88b"} Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.559773 4605 scope.go:117] "RemoveContainer" containerID="ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.591992 4605 scope.go:117] "RemoveContainer" containerID="e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.607804 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xhg7"] Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.615292 4605 scope.go:117] "RemoveContainer" containerID="cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.617936 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xhg7"] Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.656459 4605 scope.go:117] "RemoveContainer" containerID="ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364" Oct 01 14:19:58 crc kubenswrapper[4605]: E1001 14:19:58.656960 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364\": container with ID starting with ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364 not found: ID does not exist" containerID="ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.656998 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364"} err="failed to get container status 
\"ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364\": rpc error: code = NotFound desc = could not find container \"ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364\": container with ID starting with ac8dc926b1212ae682144c5e62755a32a7c8a06ac382766b04fce3754a5e1364 not found: ID does not exist" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.657043 4605 scope.go:117] "RemoveContainer" containerID="e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b" Oct 01 14:19:58 crc kubenswrapper[4605]: E1001 14:19:58.657647 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b\": container with ID starting with e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b not found: ID does not exist" containerID="e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.657674 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b"} err="failed to get container status \"e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b\": rpc error: code = NotFound desc = could not find container \"e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b\": container with ID starting with e3945393487e0fb2db7f0fc8c6844900a861e76ab10bff3768d15cc15e8d9f3b not found: ID does not exist" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.657693 4605 scope.go:117] "RemoveContainer" containerID="cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0" Oct 01 14:19:58 crc kubenswrapper[4605]: E1001 14:19:58.658016 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0\": container with ID starting with cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0 not found: ID does not exist" containerID="cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0" Oct 01 14:19:58 crc kubenswrapper[4605]: I1001 14:19:58.658065 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0"} err="failed to get container status \"cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0\": rpc error: code = NotFound desc = could not find container \"cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0\": container with ID starting with cd57c67d02f022d7e4e79dad965d85c68f30f8a23c0292ff80f628f5a1ecd6e0 not found: ID does not exist" Oct 01 14:19:58 crc kubenswrapper[4605]: E1001 14:19:58.761585 4605 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8bd3b81_80c6_40f2_b14d_1caf0ac8aecf.slice/crio-c2c6fd1d5b24b1cc7f9839c6d64ef676e7f4ce91e530c73ea764443201b4b88b\": RecentStats: unable to find data in memory cache]" Oct 01 14:19:59 crc kubenswrapper[4605]: I1001 14:19:59.941889 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" path="/var/lib/kubelet/pods/c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf/volumes" Oct 01 14:20:05 crc kubenswrapper[4605]: I1001 14:20:05.617939 4605 generic.go:334] "Generic 
(PLEG): container finished" podID="64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" containerID="8ae7b1afbde75649784411dfe006c52b78b2fb35c4d77e11faf007da6b895911" exitCode=0 Oct 01 14:20:05 crc kubenswrapper[4605]: I1001 14:20:05.618019 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" event={"ID":"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9","Type":"ContainerDied","Data":"8ae7b1afbde75649784411dfe006c52b78b2fb35c4d77e11faf007da6b895911"} Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.164974 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.288569 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ssh-key\") pod \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.288992 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-inventory\") pod \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.289064 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovncontroller-config-0\") pod \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.289219 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovn-combined-ca-bundle\") pod \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.289746 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vwql\" (UniqueName: \"kubernetes.io/projected/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-kube-api-access-8vwql\") pod \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\" (UID: \"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9\") " Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.294447 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" (UID: "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.299419 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-kube-api-access-8vwql" (OuterVolumeSpecName: "kube-api-access-8vwql") pod "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" (UID: "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9"). InnerVolumeSpecName "kube-api-access-8vwql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.319551 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" (UID: "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.321588 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" (UID: "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.328782 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-inventory" (OuterVolumeSpecName: "inventory") pod "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" (UID: "64e9c2e1-759a-4bb7-9fd4-56190af5f1b9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.392871 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vwql\" (UniqueName: \"kubernetes.io/projected/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-kube-api-access-8vwql\") on node \"crc\" DevicePath \"\"" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.392903 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.392917 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.392926 4605 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.392937 4605 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9c2e1-759a-4bb7-9fd4-56190af5f1b9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.635497 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" event={"ID":"64e9c2e1-759a-4bb7-9fd4-56190af5f1b9","Type":"ContainerDied","Data":"12ebbc93c9e4fbc52778c26dd3f7675685251cc7d91558aa3326d6eb8a7235a4"} Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.635535 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12ebbc93c9e4fbc52778c26dd3f7675685251cc7d91558aa3326d6eb8a7235a4" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.635556 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-x2z69" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737253 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n"] Oct 01 14:20:07 crc kubenswrapper[4605]: E1001 14:20:07.737664 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="extract-utilities" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737676 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="extract-utilities" Oct 01 14:20:07 crc kubenswrapper[4605]: E1001 14:20:07.737691 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737697 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 14:20:07 crc kubenswrapper[4605]: E1001 14:20:07.737725 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="registry-server" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737732 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="registry-server" Oct 01 14:20:07 crc kubenswrapper[4605]: E1001 14:20:07.737749 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="extract-content" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737754 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="extract-content" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737959 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e9c2e1-759a-4bb7-9fd4-56190af5f1b9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.737974 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8bd3b81-80c6-40f2-b14d-1caf0ac8aecf" containerName="registry-server" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.738631 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.742825 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.742971 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.743079 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.743144 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.743232 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.744361 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.758244 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n"] Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.801576 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.801622 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.801822 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.802036 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.802073 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.802292 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79vjv\" (UniqueName: \"kubernetes.io/projected/338fec8e-40aa-4170-9f63-dd6ae6607d2d-kube-api-access-79vjv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.903744 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.904048 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.904233 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.904313 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.904409 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79vjv\" (UniqueName: \"kubernetes.io/projected/338fec8e-40aa-4170-9f63-dd6ae6607d2d-kube-api-access-79vjv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.904504 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 
14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.913071 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.913038 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.913390 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.913687 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.913898 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:07 crc kubenswrapper[4605]: I1001 14:20:07.919359 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79vjv\" (UniqueName: \"kubernetes.io/projected/338fec8e-40aa-4170-9f63-dd6ae6607d2d-kube-api-access-79vjv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:08 crc kubenswrapper[4605]: I1001 14:20:08.058539 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:20:08 crc kubenswrapper[4605]: I1001 14:20:08.565688 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n"] Oct 01 14:20:08 crc kubenswrapper[4605]: I1001 14:20:08.645187 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" event={"ID":"338fec8e-40aa-4170-9f63-dd6ae6607d2d","Type":"ContainerStarted","Data":"9ef717138bd3c2079e3b50dfc072a3f62ee56b017163cfe15c1630ed8fec7b1c"} Oct 01 14:20:10 crc kubenswrapper[4605]: I1001 14:20:10.674141 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" event={"ID":"338fec8e-40aa-4170-9f63-dd6ae6607d2d","Type":"ContainerStarted","Data":"83f912f52c8f32bcded83724adf2fd8b025cdb5909c219d5b648154dd95c2b70"} Oct 01 14:20:10 crc kubenswrapper[4605]: I1001 14:20:10.699633 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" podStartSLOduration=2.491601839 podStartE2EDuration="3.69961674s" podCreationTimestamp="2025-10-01 14:20:07 +0000 UTC" firstStartedPulling="2025-10-01 14:20:08.566648136 +0000 UTC m=+2131.310624354" lastFinishedPulling="2025-10-01 14:20:09.774663047 +0000 UTC m=+2132.518639255" observedRunningTime="2025-10-01 14:20:10.691225218 +0000 UTC m=+2133.435201426" watchObservedRunningTime="2025-10-01 14:20:10.69961674 +0000 UTC m=+2133.443592948" Oct 01 14:21:00 crc kubenswrapper[4605]: I1001 14:21:00.126558 4605 generic.go:334] "Generic (PLEG): container finished" podID="338fec8e-40aa-4170-9f63-dd6ae6607d2d" containerID="83f912f52c8f32bcded83724adf2fd8b025cdb5909c219d5b648154dd95c2b70" exitCode=0 Oct 01 14:21:00 crc kubenswrapper[4605]: I1001 14:21:00.126608 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" event={"ID":"338fec8e-40aa-4170-9f63-dd6ae6607d2d","Type":"ContainerDied","Data":"83f912f52c8f32bcded83724adf2fd8b025cdb5909c219d5b648154dd95c2b70"} Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.498278 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.646312 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-nova-metadata-neutron-config-0\") pod \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.646815 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-ssh-key\") pod \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.646942 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.647024 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-metadata-combined-ca-bundle\") pod \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.647784 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-inventory\") pod \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.647871 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79vjv\" (UniqueName: \"kubernetes.io/projected/338fec8e-40aa-4170-9f63-dd6ae6607d2d-kube-api-access-79vjv\") pod \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\" (UID: \"338fec8e-40aa-4170-9f63-dd6ae6607d2d\") " Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.657870 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/338fec8e-40aa-4170-9f63-dd6ae6607d2d-kube-api-access-79vjv" (OuterVolumeSpecName: "kube-api-access-79vjv") pod "338fec8e-40aa-4170-9f63-dd6ae6607d2d" (UID: "338fec8e-40aa-4170-9f63-dd6ae6607d2d"). InnerVolumeSpecName "kube-api-access-79vjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.659965 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "338fec8e-40aa-4170-9f63-dd6ae6607d2d" (UID: "338fec8e-40aa-4170-9f63-dd6ae6607d2d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.678346 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "338fec8e-40aa-4170-9f63-dd6ae6607d2d" (UID: "338fec8e-40aa-4170-9f63-dd6ae6607d2d"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.687267 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-inventory" (OuterVolumeSpecName: "inventory") pod "338fec8e-40aa-4170-9f63-dd6ae6607d2d" (UID: "338fec8e-40aa-4170-9f63-dd6ae6607d2d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.688312 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "338fec8e-40aa-4170-9f63-dd6ae6607d2d" (UID: "338fec8e-40aa-4170-9f63-dd6ae6607d2d"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.701945 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "338fec8e-40aa-4170-9f63-dd6ae6607d2d" (UID: "338fec8e-40aa-4170-9f63-dd6ae6607d2d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.750696 4605 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.750727 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.750738 4605 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.750749 4605 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.750759 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/338fec8e-40aa-4170-9f63-dd6ae6607d2d-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:21:01 crc kubenswrapper[4605]: I1001 14:21:01.750767 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79vjv\" (UniqueName: \"kubernetes.io/projected/338fec8e-40aa-4170-9f63-dd6ae6607d2d-kube-api-access-79vjv\") on node \"crc\" DevicePath \"\"" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.143985 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" event={"ID":"338fec8e-40aa-4170-9f63-dd6ae6607d2d","Type":"ContainerDied","Data":"9ef717138bd3c2079e3b50dfc072a3f62ee56b017163cfe15c1630ed8fec7b1c"} Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.144015 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.144027 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ef717138bd3c2079e3b50dfc072a3f62ee56b017163cfe15c1630ed8fec7b1c" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.245393 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z"] Oct 01 14:21:02 crc kubenswrapper[4605]: E1001 14:21:02.247495 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338fec8e-40aa-4170-9f63-dd6ae6607d2d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.247523 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="338fec8e-40aa-4170-9f63-dd6ae6607d2d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.248869 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="338fec8e-40aa-4170-9f63-dd6ae6607d2d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.250714 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.255132 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.255567 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.255703 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.255831 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.255993 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.305214 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z"] Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.369203 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.369369 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.369394 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwvmd\" (UniqueName: \"kubernetes.io/projected/9df7ad58-d542-4c8a-89fb-464689d1729c-kube-api-access-xwvmd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.369457 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.369500 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.471011 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.471176 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.471206 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwvmd\" (UniqueName: \"kubernetes.io/projected/9df7ad58-d542-4c8a-89fb-464689d1729c-kube-api-access-xwvmd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.471232 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.471255 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.474580 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.475019 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.475085 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.475886 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.494804 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwvmd\" (UniqueName: \"kubernetes.io/projected/9df7ad58-d542-4c8a-89fb-464689d1729c-kube-api-access-xwvmd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:02 crc kubenswrapper[4605]: I1001 14:21:02.613047 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:21:03 crc kubenswrapper[4605]: I1001 14:21:03.381509 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z"] Oct 01 14:21:04 crc kubenswrapper[4605]: I1001 14:21:04.161459 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" event={"ID":"9df7ad58-d542-4c8a-89fb-464689d1729c","Type":"ContainerStarted","Data":"093f75b30cdee52347b3409c3f00a44cf76f30a2b997f37af837e579b343c4f0"} Oct 01 14:21:04 crc kubenswrapper[4605]: I1001 14:21:04.162634 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" event={"ID":"9df7ad58-d542-4c8a-89fb-464689d1729c","Type":"ContainerStarted","Data":"41982e3ec7aa26d61ff4eb5918414edb57fb942b489caa6ad86b448905f8d348"} Oct 01 14:21:51 crc kubenswrapper[4605]: I1001 14:21:51.631574 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:21:51 crc kubenswrapper[4605]: I1001 14:21:51.632189 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:22:21 crc kubenswrapper[4605]: I1001 14:22:21.631534 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:22:21 crc kubenswrapper[4605]: I1001 14:22:21.632156 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.203393 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" podStartSLOduration=82.801470107 podStartE2EDuration="1m23.203369031s" podCreationTimestamp="2025-10-01 14:21:02 +0000 UTC" firstStartedPulling="2025-10-01 14:21:03.384912433 +0000 UTC 
m=+2186.128888641" lastFinishedPulling="2025-10-01 14:21:03.786811347 +0000 UTC m=+2186.530787565" observedRunningTime="2025-10-01 14:21:04.187163953 +0000 UTC m=+2186.931140171" watchObservedRunningTime="2025-10-01 14:22:25.203369031 +0000 UTC m=+2267.947345239" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.206120 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5l2wl"] Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.208723 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.233194 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5l2wl"] Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.371954 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-catalog-content\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.372002 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-utilities\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.372138 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85km7\" (UniqueName: \"kubernetes.io/projected/5aca3831-fca0-4857-86c0-7627c373e89b-kube-api-access-85km7\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.474220 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-utilities\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.474274 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-catalog-content\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.474354 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85km7\" (UniqueName: \"kubernetes.io/projected/5aca3831-fca0-4857-86c0-7627c373e89b-kube-api-access-85km7\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.474878 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-utilities\") pod \"community-operators-5l2wl\" (UID: 
\"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.474947 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-catalog-content\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.496384 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85km7\" (UniqueName: \"kubernetes.io/projected/5aca3831-fca0-4857-86c0-7627c373e89b-kube-api-access-85km7\") pod \"community-operators-5l2wl\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:25 crc kubenswrapper[4605]: I1001 14:22:25.533151 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:26 crc kubenswrapper[4605]: I1001 14:22:26.071505 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5l2wl"] Oct 01 14:22:26 crc kubenswrapper[4605]: I1001 14:22:26.914679 4605 generic.go:334] "Generic (PLEG): container finished" podID="5aca3831-fca0-4857-86c0-7627c373e89b" containerID="f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4" exitCode=0 Oct 01 14:22:26 crc kubenswrapper[4605]: I1001 14:22:26.914752 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerDied","Data":"f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4"} Oct 01 14:22:26 crc kubenswrapper[4605]: I1001 14:22:26.914992 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerStarted","Data":"9e126c010a53253008cdec57db5cde55a2eb3a647ebff3603efc40f96124a275"} Oct 01 14:22:26 crc kubenswrapper[4605]: I1001 14:22:26.916451 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:22:27 crc kubenswrapper[4605]: I1001 14:22:27.943417 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerStarted","Data":"dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221"} Oct 01 14:22:29 crc kubenswrapper[4605]: I1001 14:22:29.950213 4605 generic.go:334] "Generic (PLEG): container finished" podID="5aca3831-fca0-4857-86c0-7627c373e89b" containerID="dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221" exitCode=0 Oct 01 14:22:29 crc kubenswrapper[4605]: I1001 14:22:29.950309 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerDied","Data":"dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221"} Oct 01 14:22:30 crc kubenswrapper[4605]: I1001 14:22:30.963843 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerStarted","Data":"2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21"} Oct 01 
14:22:31 crc kubenswrapper[4605]: I1001 14:22:31.005083 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5l2wl" podStartSLOduration=2.329450314 podStartE2EDuration="6.005019747s" podCreationTimestamp="2025-10-01 14:22:25 +0000 UTC" firstStartedPulling="2025-10-01 14:22:26.916211442 +0000 UTC m=+2269.660187640" lastFinishedPulling="2025-10-01 14:22:30.591780865 +0000 UTC m=+2273.335757073" observedRunningTime="2025-10-01 14:22:30.98656969 +0000 UTC m=+2273.730545898" watchObservedRunningTime="2025-10-01 14:22:31.005019747 +0000 UTC m=+2273.748995955" Oct 01 14:22:35 crc kubenswrapper[4605]: I1001 14:22:35.534295 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:35 crc kubenswrapper[4605]: I1001 14:22:35.534730 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:36 crc kubenswrapper[4605]: I1001 14:22:36.581384 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-5l2wl" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="registry-server" probeResult="failure" output=< Oct 01 14:22:36 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:22:36 crc kubenswrapper[4605]: > Oct 01 14:22:45 crc kubenswrapper[4605]: I1001 14:22:45.580743 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:45 crc kubenswrapper[4605]: I1001 14:22:45.641110 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:45 crc kubenswrapper[4605]: I1001 14:22:45.826149 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5l2wl"] Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.124400 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5l2wl" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="registry-server" containerID="cri-o://2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21" gracePeriod=2 Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.526276 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.609677 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-utilities\") pod \"5aca3831-fca0-4857-86c0-7627c373e89b\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.609781 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85km7\" (UniqueName: \"kubernetes.io/projected/5aca3831-fca0-4857-86c0-7627c373e89b-kube-api-access-85km7\") pod \"5aca3831-fca0-4857-86c0-7627c373e89b\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.609854 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-catalog-content\") pod \"5aca3831-fca0-4857-86c0-7627c373e89b\" (UID: \"5aca3831-fca0-4857-86c0-7627c373e89b\") " Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.610987 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-utilities" (OuterVolumeSpecName: "utilities") pod "5aca3831-fca0-4857-86c0-7627c373e89b" (UID: "5aca3831-fca0-4857-86c0-7627c373e89b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.619392 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aca3831-fca0-4857-86c0-7627c373e89b-kube-api-access-85km7" (OuterVolumeSpecName: "kube-api-access-85km7") pod "5aca3831-fca0-4857-86c0-7627c373e89b" (UID: "5aca3831-fca0-4857-86c0-7627c373e89b"). InnerVolumeSpecName "kube-api-access-85km7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.672053 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5aca3831-fca0-4857-86c0-7627c373e89b" (UID: "5aca3831-fca0-4857-86c0-7627c373e89b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.712204 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.712407 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85km7\" (UniqueName: \"kubernetes.io/projected/5aca3831-fca0-4857-86c0-7627c373e89b-kube-api-access-85km7\") on node \"crc\" DevicePath \"\"" Oct 01 14:22:47 crc kubenswrapper[4605]: I1001 14:22:47.712468 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aca3831-fca0-4857-86c0-7627c373e89b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.136835 4605 generic.go:334] "Generic (PLEG): container finished" podID="5aca3831-fca0-4857-86c0-7627c373e89b" containerID="2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21" exitCode=0 Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.136891 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5l2wl" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.136919 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerDied","Data":"2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21"} Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.137598 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5l2wl" event={"ID":"5aca3831-fca0-4857-86c0-7627c373e89b","Type":"ContainerDied","Data":"9e126c010a53253008cdec57db5cde55a2eb3a647ebff3603efc40f96124a275"} Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.137616 4605 scope.go:117] "RemoveContainer" containerID="2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.167686 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5l2wl"] Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.172324 4605 scope.go:117] "RemoveContainer" containerID="dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.175292 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5l2wl"] Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.194768 4605 scope.go:117] "RemoveContainer" containerID="f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.244898 4605 scope.go:117] "RemoveContainer" containerID="2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21" Oct 01 14:22:48 crc kubenswrapper[4605]: E1001 14:22:48.245274 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21\": container with ID starting with 2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21 not found: ID does not exist" containerID="2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.245307 
4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21"} err="failed to get container status \"2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21\": rpc error: code = NotFound desc = could not find container \"2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21\": container with ID starting with 2c28110873d84a967f2ccfaa382fbee7f77a02867cf4c7bee121ac50470f6b21 not found: ID does not exist" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.245329 4605 scope.go:117] "RemoveContainer" containerID="dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221" Oct 01 14:22:48 crc kubenswrapper[4605]: E1001 14:22:48.245724 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221\": container with ID starting with dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221 not found: ID does not exist" containerID="dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.245751 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221"} err="failed to get container status \"dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221\": rpc error: code = NotFound desc = could not find container \"dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221\": container with ID starting with dbf7113eb35a0a93fbd19f677c5fb14cb3dc593ba609a8e2e37e9ee523fd1221 not found: ID does not exist" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.245767 4605 scope.go:117] "RemoveContainer" containerID="f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4" Oct 01 14:22:48 crc kubenswrapper[4605]: E1001 14:22:48.246025 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4\": container with ID starting with f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4 not found: ID does not exist" containerID="f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4" Oct 01 14:22:48 crc kubenswrapper[4605]: I1001 14:22:48.246130 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4"} err="failed to get container status \"f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4\": rpc error: code = NotFound desc = could not find container \"f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4\": container with ID starting with f76a7e1cbc85e75957f2c1629febc04131d00159b97a28e87b746c293ea693b4 not found: ID does not exist" Oct 01 14:22:49 crc kubenswrapper[4605]: I1001 14:22:49.941980 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" path="/var/lib/kubelet/pods/5aca3831-fca0-4857-86c0-7627c373e89b/volumes" Oct 01 14:22:51 crc kubenswrapper[4605]: I1001 14:22:51.632328 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:22:51 crc kubenswrapper[4605]: I1001 14:22:51.632753 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:22:51 crc kubenswrapper[4605]: I1001 14:22:51.632799 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:22:51 crc kubenswrapper[4605]: I1001 14:22:51.633564 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:22:51 crc kubenswrapper[4605]: I1001 14:22:51.633621 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" gracePeriod=600 Oct 01 14:22:51 crc kubenswrapper[4605]: E1001 14:22:51.753958 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:22:52 crc kubenswrapper[4605]: I1001 14:22:52.180511 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" exitCode=0 Oct 01 14:22:52 crc kubenswrapper[4605]: I1001 14:22:52.180629 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e"} Oct 01 14:22:52 crc kubenswrapper[4605]: I1001 14:22:52.181159 4605 scope.go:117] "RemoveContainer" containerID="f06cb3938ac439d54af13ffb12c4fd504d9a9e79baaf158e6ffa42cfea1ffc76" Oct 01 14:22:52 crc kubenswrapper[4605]: I1001 14:22:52.182851 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:22:52 crc kubenswrapper[4605]: E1001 14:22:52.184138 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.555458 4605 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-5jn5n"] Oct 01 14:23:05 crc kubenswrapper[4605]: E1001 14:23:05.556729 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="extract-content" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.556755 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="extract-content" Oct 01 14:23:05 crc kubenswrapper[4605]: E1001 14:23:05.556785 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="registry-server" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.556797 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="registry-server" Oct 01 14:23:05 crc kubenswrapper[4605]: E1001 14:23:05.556823 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="extract-utilities" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.556836 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="extract-utilities" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.557214 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aca3831-fca0-4857-86c0-7627c373e89b" containerName="registry-server" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.559714 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.565500 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5jn5n"] Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.678021 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-utilities\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.678148 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-catalog-content\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.678274 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxq6w\" (UniqueName: \"kubernetes.io/projected/fd267e16-2c9d-4162-a66e-298e05cf96a4-kube-api-access-wxq6w\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.779735 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-utilities\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.779794 4605 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-catalog-content\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.779859 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxq6w\" (UniqueName: \"kubernetes.io/projected/fd267e16-2c9d-4162-a66e-298e05cf96a4-kube-api-access-wxq6w\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.780384 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-utilities\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.780486 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-catalog-content\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.801131 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxq6w\" (UniqueName: \"kubernetes.io/projected/fd267e16-2c9d-4162-a66e-298e05cf96a4-kube-api-access-wxq6w\") pod \"certified-operators-5jn5n\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.889391 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:05 crc kubenswrapper[4605]: I1001 14:23:05.929444 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:23:05 crc kubenswrapper[4605]: E1001 14:23:05.929956 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:23:06 crc kubenswrapper[4605]: I1001 14:23:06.474272 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5jn5n"] Oct 01 14:23:07 crc kubenswrapper[4605]: I1001 14:23:07.315734 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerID="1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a" exitCode=0 Oct 01 14:23:07 crc kubenswrapper[4605]: I1001 14:23:07.315994 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerDied","Data":"1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a"} Oct 01 14:23:07 crc kubenswrapper[4605]: I1001 14:23:07.316027 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerStarted","Data":"d1c39cdac835dee6722ffb1d734eee1aca901050b736746bba163931fcd78615"} Oct 01 14:23:09 crc kubenswrapper[4605]: I1001 14:23:09.347751 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerStarted","Data":"330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736"} Oct 01 14:23:10 crc kubenswrapper[4605]: I1001 14:23:10.377876 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerID="330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736" exitCode=0 Oct 01 14:23:10 crc kubenswrapper[4605]: I1001 14:23:10.377950 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerDied","Data":"330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736"} Oct 01 14:23:11 crc kubenswrapper[4605]: I1001 14:23:11.388574 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerStarted","Data":"ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b"} Oct 01 14:23:11 crc kubenswrapper[4605]: I1001 14:23:11.421838 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5jn5n" podStartSLOduration=2.87047339 podStartE2EDuration="6.421818781s" podCreationTimestamp="2025-10-01 14:23:05 +0000 UTC" firstStartedPulling="2025-10-01 14:23:07.318267133 +0000 UTC m=+2310.062243341" lastFinishedPulling="2025-10-01 14:23:10.869612524 +0000 UTC m=+2313.613588732" observedRunningTime="2025-10-01 
14:23:11.41940003 +0000 UTC m=+2314.163376258" watchObservedRunningTime="2025-10-01 14:23:11.421818781 +0000 UTC m=+2314.165794989" Oct 01 14:23:15 crc kubenswrapper[4605]: I1001 14:23:15.889762 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:15 crc kubenswrapper[4605]: I1001 14:23:15.891935 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:16 crc kubenswrapper[4605]: I1001 14:23:16.942688 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-5jn5n" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="registry-server" probeResult="failure" output=< Oct 01 14:23:16 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:23:16 crc kubenswrapper[4605]: > Oct 01 14:23:17 crc kubenswrapper[4605]: I1001 14:23:17.932833 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:23:17 crc kubenswrapper[4605]: E1001 14:23:17.933182 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:23:25 crc kubenswrapper[4605]: I1001 14:23:25.941648 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:25 crc kubenswrapper[4605]: I1001 14:23:25.992496 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:26 crc kubenswrapper[4605]: I1001 14:23:26.177531 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5jn5n"] Oct 01 14:23:27 crc kubenswrapper[4605]: I1001 14:23:27.525421 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5jn5n" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="registry-server" containerID="cri-o://ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b" gracePeriod=2 Oct 01 14:23:27 crc kubenswrapper[4605]: I1001 14:23:27.954773 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.016233 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-catalog-content\") pod \"fd267e16-2c9d-4162-a66e-298e05cf96a4\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.016285 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-utilities\") pod \"fd267e16-2c9d-4162-a66e-298e05cf96a4\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.016459 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxq6w\" (UniqueName: \"kubernetes.io/projected/fd267e16-2c9d-4162-a66e-298e05cf96a4-kube-api-access-wxq6w\") pod \"fd267e16-2c9d-4162-a66e-298e05cf96a4\" (UID: \"fd267e16-2c9d-4162-a66e-298e05cf96a4\") " Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.018590 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-utilities" (OuterVolumeSpecName: "utilities") pod "fd267e16-2c9d-4162-a66e-298e05cf96a4" (UID: "fd267e16-2c9d-4162-a66e-298e05cf96a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.025307 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd267e16-2c9d-4162-a66e-298e05cf96a4-kube-api-access-wxq6w" (OuterVolumeSpecName: "kube-api-access-wxq6w") pod "fd267e16-2c9d-4162-a66e-298e05cf96a4" (UID: "fd267e16-2c9d-4162-a66e-298e05cf96a4"). InnerVolumeSpecName "kube-api-access-wxq6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.061232 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd267e16-2c9d-4162-a66e-298e05cf96a4" (UID: "fd267e16-2c9d-4162-a66e-298e05cf96a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.117998 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.118271 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd267e16-2c9d-4162-a66e-298e05cf96a4-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.118305 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxq6w\" (UniqueName: \"kubernetes.io/projected/fd267e16-2c9d-4162-a66e-298e05cf96a4-kube-api-access-wxq6w\") on node \"crc\" DevicePath \"\"" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.541354 4605 generic.go:334] "Generic (PLEG): container finished" podID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerID="ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b" exitCode=0 Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.541414 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerDied","Data":"ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b"} Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.541446 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5jn5n" event={"ID":"fd267e16-2c9d-4162-a66e-298e05cf96a4","Type":"ContainerDied","Data":"d1c39cdac835dee6722ffb1d734eee1aca901050b736746bba163931fcd78615"} Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.541469 4605 scope.go:117] "RemoveContainer" containerID="ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.541509 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5jn5n" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.575126 4605 scope.go:117] "RemoveContainer" containerID="330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.584607 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5jn5n"] Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.592523 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5jn5n"] Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.605922 4605 scope.go:117] "RemoveContainer" containerID="1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.646155 4605 scope.go:117] "RemoveContainer" containerID="ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b" Oct 01 14:23:28 crc kubenswrapper[4605]: E1001 14:23:28.646724 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b\": container with ID starting with ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b not found: ID does not exist" containerID="ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.646755 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b"} err="failed to get container status \"ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b\": rpc error: code = NotFound desc = could not find container \"ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b\": container with ID starting with ccf4bc5c0bd4a65c2d2fa7a267dc19b492f3e6ae9e2c54af4971b840abcfcc8b not found: ID does not exist" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.646779 4605 scope.go:117] "RemoveContainer" containerID="330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736" Oct 01 14:23:28 crc kubenswrapper[4605]: E1001 14:23:28.647246 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736\": container with ID starting with 330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736 not found: ID does not exist" containerID="330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.647276 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736"} err="failed to get container status \"330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736\": rpc error: code = NotFound desc = could not find container \"330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736\": container with ID starting with 330e49c4834e59fb855ec26bb4b4b4fb2c050a05e69ca7a714c1c3576a43b736 not found: ID does not exist" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.647293 4605 scope.go:117] "RemoveContainer" containerID="1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a" Oct 01 14:23:28 crc kubenswrapper[4605]: E1001 14:23:28.647619 4605 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a\": container with ID starting with 1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a not found: ID does not exist" containerID="1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a" Oct 01 14:23:28 crc kubenswrapper[4605]: I1001 14:23:28.647648 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a"} err="failed to get container status \"1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a\": rpc error: code = NotFound desc = could not find container \"1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a\": container with ID starting with 1bb0511cec520e940584cb080ffb44cf2f3d69690997292939aea3004c4a9c6a not found: ID does not exist" Oct 01 14:23:29 crc kubenswrapper[4605]: I1001 14:23:29.926603 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:23:29 crc kubenswrapper[4605]: E1001 14:23:29.927282 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:23:29 crc kubenswrapper[4605]: I1001 14:23:29.937491 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" path="/var/lib/kubelet/pods/fd267e16-2c9d-4162-a66e-298e05cf96a4/volumes" Oct 01 14:23:44 crc kubenswrapper[4605]: I1001 14:23:44.926499 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:23:44 crc kubenswrapper[4605]: E1001 14:23:44.928590 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:23:56 crc kubenswrapper[4605]: I1001 14:23:56.926380 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:23:56 crc kubenswrapper[4605]: E1001 14:23:56.927073 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:24:09 crc kubenswrapper[4605]: I1001 14:24:09.927215 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:24:09 crc kubenswrapper[4605]: E1001 14:24:09.927928 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:24:20 crc kubenswrapper[4605]: I1001 14:24:20.926697 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:24:20 crc kubenswrapper[4605]: E1001 14:24:20.927512 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:24:33 crc kubenswrapper[4605]: I1001 14:24:33.927360 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:24:33 crc kubenswrapper[4605]: E1001 14:24:33.929460 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:24:44 crc kubenswrapper[4605]: I1001 14:24:44.926324 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:24:44 crc kubenswrapper[4605]: E1001 14:24:44.927013 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:24:56 crc kubenswrapper[4605]: I1001 14:24:56.926535 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:24:56 crc kubenswrapper[4605]: E1001 14:24:56.927395 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:25:11 crc kubenswrapper[4605]: I1001 14:25:11.927031 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:25:11 crc kubenswrapper[4605]: E1001 14:25:11.927850 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:25:24 crc kubenswrapper[4605]: I1001 14:25:24.926448 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:25:24 crc kubenswrapper[4605]: E1001 14:25:24.927138 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:25:30 crc kubenswrapper[4605]: I1001 14:25:30.639995 4605 generic.go:334] "Generic (PLEG): container finished" podID="9df7ad58-d542-4c8a-89fb-464689d1729c" containerID="093f75b30cdee52347b3409c3f00a44cf76f30a2b997f37af837e579b343c4f0" exitCode=0 Oct 01 14:25:30 crc kubenswrapper[4605]: I1001 14:25:30.640068 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" event={"ID":"9df7ad58-d542-4c8a-89fb-464689d1729c","Type":"ContainerDied","Data":"093f75b30cdee52347b3409c3f00a44cf76f30a2b997f37af837e579b343c4f0"} Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.147603 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.241043 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-secret-0\") pod \"9df7ad58-d542-4c8a-89fb-464689d1729c\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.241144 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwvmd\" (UniqueName: \"kubernetes.io/projected/9df7ad58-d542-4c8a-89fb-464689d1729c-kube-api-access-xwvmd\") pod \"9df7ad58-d542-4c8a-89fb-464689d1729c\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.241300 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-inventory\") pod \"9df7ad58-d542-4c8a-89fb-464689d1729c\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.241381 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-combined-ca-bundle\") pod \"9df7ad58-d542-4c8a-89fb-464689d1729c\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.241462 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-ssh-key\") pod \"9df7ad58-d542-4c8a-89fb-464689d1729c\" (UID: \"9df7ad58-d542-4c8a-89fb-464689d1729c\") " Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.247324 4605 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9df7ad58-d542-4c8a-89fb-464689d1729c-kube-api-access-xwvmd" (OuterVolumeSpecName: "kube-api-access-xwvmd") pod "9df7ad58-d542-4c8a-89fb-464689d1729c" (UID: "9df7ad58-d542-4c8a-89fb-464689d1729c"). InnerVolumeSpecName "kube-api-access-xwvmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.250642 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "9df7ad58-d542-4c8a-89fb-464689d1729c" (UID: "9df7ad58-d542-4c8a-89fb-464689d1729c"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.271367 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "9df7ad58-d542-4c8a-89fb-464689d1729c" (UID: "9df7ad58-d542-4c8a-89fb-464689d1729c"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.271782 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9df7ad58-d542-4c8a-89fb-464689d1729c" (UID: "9df7ad58-d542-4c8a-89fb-464689d1729c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.274238 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-inventory" (OuterVolumeSpecName: "inventory") pod "9df7ad58-d542-4c8a-89fb-464689d1729c" (UID: "9df7ad58-d542-4c8a-89fb-464689d1729c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.343487 4605 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.343510 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwvmd\" (UniqueName: \"kubernetes.io/projected/9df7ad58-d542-4c8a-89fb-464689d1729c-kube-api-access-xwvmd\") on node \"crc\" DevicePath \"\"" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.343520 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.343529 4605 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.343537 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9df7ad58-d542-4c8a-89fb-464689d1729c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.659197 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" event={"ID":"9df7ad58-d542-4c8a-89fb-464689d1729c","Type":"ContainerDied","Data":"41982e3ec7aa26d61ff4eb5918414edb57fb942b489caa6ad86b448905f8d348"} Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.659479 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41982e3ec7aa26d61ff4eb5918414edb57fb942b489caa6ad86b448905f8d348" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.659255 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772206 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd"] Oct 01 14:25:32 crc kubenswrapper[4605]: E1001 14:25:32.772610 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="extract-utilities" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772625 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="extract-utilities" Oct 01 14:25:32 crc kubenswrapper[4605]: E1001 14:25:32.772634 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="registry-server" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772640 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="registry-server" Oct 01 14:25:32 crc kubenswrapper[4605]: E1001 14:25:32.772650 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="extract-content" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772657 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="extract-content" Oct 01 14:25:32 crc kubenswrapper[4605]: E1001 14:25:32.772691 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9df7ad58-d542-4c8a-89fb-464689d1729c" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772699 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="9df7ad58-d542-4c8a-89fb-464689d1729c" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772880 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="9df7ad58-d542-4c8a-89fb-464689d1729c" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.772911 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd267e16-2c9d-4162-a66e-298e05cf96a4" containerName="registry-server" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.775407 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.778268 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.778556 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.778832 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.779133 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.780619 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.781412 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.789809 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.790362 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd"] Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.953953 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj4qc\" (UniqueName: \"kubernetes.io/projected/07ba7a39-5510-4075-b789-aa61ef2643f5-kube-api-access-tj4qc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.954301 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.954486 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.954607 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.954720 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.954877 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.955014 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.955197 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:32 crc kubenswrapper[4605]: I1001 14:25:32.955351 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057565 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj4qc\" (UniqueName: \"kubernetes.io/projected/07ba7a39-5510-4075-b789-aa61ef2643f5-kube-api-access-tj4qc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057628 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057699 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057720 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057754 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057822 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057841 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057906 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.057945 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.061570 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.062525 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.062648 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-ssh-key\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.063447 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.064070 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.064936 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.065220 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.065508 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.077379 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj4qc\" (UniqueName: \"kubernetes.io/projected/07ba7a39-5510-4075-b789-aa61ef2643f5-kube-api-access-tj4qc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-v8zqd\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.115922 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.645769 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd"] Oct 01 14:25:33 crc kubenswrapper[4605]: I1001 14:25:33.672587 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" event={"ID":"07ba7a39-5510-4075-b789-aa61ef2643f5","Type":"ContainerStarted","Data":"18c315b0555838bcde85c22ae52ca6ef652cd5eaa8e52bbaf190fee52d86b807"} Oct 01 14:25:34 crc kubenswrapper[4605]: I1001 14:25:34.684910 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" event={"ID":"07ba7a39-5510-4075-b789-aa61ef2643f5","Type":"ContainerStarted","Data":"f8de87fe5cd1770b4106a4fcc7cc221c39575cbe813e1ca40ad97b91ad633aac"} Oct 01 14:25:34 crc kubenswrapper[4605]: I1001 14:25:34.707427 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" podStartSLOduration=2.152343491 podStartE2EDuration="2.707401789s" podCreationTimestamp="2025-10-01 14:25:32 +0000 UTC" firstStartedPulling="2025-10-01 14:25:33.628501149 +0000 UTC m=+2456.372477367" lastFinishedPulling="2025-10-01 14:25:34.183559457 +0000 UTC m=+2456.927535665" observedRunningTime="2025-10-01 14:25:34.704159317 +0000 UTC m=+2457.448135525" watchObservedRunningTime="2025-10-01 14:25:34.707401789 +0000 UTC m=+2457.451378007" Oct 01 14:25:35 crc kubenswrapper[4605]: I1001 14:25:35.926774 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:25:35 crc kubenswrapper[4605]: E1001 14:25:35.927024 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:25:47 crc kubenswrapper[4605]: I1001 14:25:47.932605 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:25:47 crc kubenswrapper[4605]: E1001 14:25:47.933341 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:25:58 crc kubenswrapper[4605]: I1001 14:25:58.926984 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:25:58 crc kubenswrapper[4605]: E1001 14:25:58.927978 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" 
podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:26:10 crc kubenswrapper[4605]: I1001 14:26:10.927245 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:26:10 crc kubenswrapper[4605]: E1001 14:26:10.928062 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:26:24 crc kubenswrapper[4605]: I1001 14:26:24.927505 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:26:24 crc kubenswrapper[4605]: E1001 14:26:24.928381 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:26:38 crc kubenswrapper[4605]: I1001 14:26:38.926897 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:26:38 crc kubenswrapper[4605]: E1001 14:26:38.927645 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:26:51 crc kubenswrapper[4605]: I1001 14:26:51.926548 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:26:51 crc kubenswrapper[4605]: E1001 14:26:51.927487 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:27:02 crc kubenswrapper[4605]: I1001 14:27:02.926767 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:27:02 crc kubenswrapper[4605]: E1001 14:27:02.928828 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:27:14 crc kubenswrapper[4605]: I1001 14:27:14.926800 4605 scope.go:117] "RemoveContainer" 
containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:27:14 crc kubenswrapper[4605]: E1001 14:27:14.927757 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:27:25 crc kubenswrapper[4605]: I1001 14:27:25.926737 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:27:25 crc kubenswrapper[4605]: E1001 14:27:25.927455 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:27:39 crc kubenswrapper[4605]: I1001 14:27:39.927230 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:27:39 crc kubenswrapper[4605]: E1001 14:27:39.928182 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:27:54 crc kubenswrapper[4605]: I1001 14:27:54.926851 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:27:55 crc kubenswrapper[4605]: I1001 14:27:55.955439 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"2b12842fc21dc955a6e9c6b86d51ef221646a4c444d7a262b1bedd2def659168"} Oct 01 14:29:04 crc kubenswrapper[4605]: I1001 14:29:04.585897 4605 generic.go:334] "Generic (PLEG): container finished" podID="07ba7a39-5510-4075-b789-aa61ef2643f5" containerID="f8de87fe5cd1770b4106a4fcc7cc221c39575cbe813e1ca40ad97b91ad633aac" exitCode=0 Oct 01 14:29:04 crc kubenswrapper[4605]: I1001 14:29:04.585975 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" event={"ID":"07ba7a39-5510-4075-b789-aa61ef2643f5","Type":"ContainerDied","Data":"f8de87fe5cd1770b4106a4fcc7cc221c39575cbe813e1ca40ad97b91ad633aac"} Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.045229 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.241343 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-inventory\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.241460 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-1\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.241483 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tj4qc\" (UniqueName: \"kubernetes.io/projected/07ba7a39-5510-4075-b789-aa61ef2643f5-kube-api-access-tj4qc\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.242397 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-0\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.242568 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-ssh-key\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.242609 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-0\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.242646 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-combined-ca-bundle\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.242682 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-1\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.242745 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-extra-config-0\") pod \"07ba7a39-5510-4075-b789-aa61ef2643f5\" (UID: \"07ba7a39-5510-4075-b789-aa61ef2643f5\") " Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.249194 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/07ba7a39-5510-4075-b789-aa61ef2643f5-kube-api-access-tj4qc" (OuterVolumeSpecName: "kube-api-access-tj4qc") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "kube-api-access-tj4qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.271818 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.277366 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.281329 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.288616 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.292403 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-inventory" (OuterVolumeSpecName: "inventory") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.293276 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.315423 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.319285 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "07ba7a39-5510-4075-b789-aa61ef2643f5" (UID: "07ba7a39-5510-4075-b789-aa61ef2643f5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.344968 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345005 4605 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345016 4605 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345024 4605 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345033 4605 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345041 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345050 4605 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345059 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tj4qc\" (UniqueName: \"kubernetes.io/projected/07ba7a39-5510-4075-b789-aa61ef2643f5-kube-api-access-tj4qc\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.345066 4605 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/07ba7a39-5510-4075-b789-aa61ef2643f5-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.605415 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" event={"ID":"07ba7a39-5510-4075-b789-aa61ef2643f5","Type":"ContainerDied","Data":"18c315b0555838bcde85c22ae52ca6ef652cd5eaa8e52bbaf190fee52d86b807"} Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.605473 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18c315b0555838bcde85c22ae52ca6ef652cd5eaa8e52bbaf190fee52d86b807" Oct 01 14:29:06 crc 
kubenswrapper[4605]: I1001 14:29:06.605544 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-v8zqd" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.803361 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l"] Oct 01 14:29:06 crc kubenswrapper[4605]: E1001 14:29:06.803754 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07ba7a39-5510-4075-b789-aa61ef2643f5" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.803770 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="07ba7a39-5510-4075-b789-aa61ef2643f5" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.803975 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="07ba7a39-5510-4075-b789-aa61ef2643f5" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.804976 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.807625 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.808207 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.808331 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.818084 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-dpzpx" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.818381 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.826377 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l"] Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.855888 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.856270 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.856366 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxz77\" (UniqueName: \"kubernetes.io/projected/58600359-0fa8-4801-a1d3-87598ba13651-kube-api-access-kxz77\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.856486 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.856650 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.856769 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.856857 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.958632 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.958980 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.960387 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.960721 4605 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.961256 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.961422 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxz77\" (UniqueName: \"kubernetes.io/projected/58600359-0fa8-4801-a1d3-87598ba13651-kube-api-access-kxz77\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.961587 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.969163 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.969154 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.969224 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.970766 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.970991 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.971433 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:06 crc kubenswrapper[4605]: I1001 14:29:06.977155 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxz77\" (UniqueName: \"kubernetes.io/projected/58600359-0fa8-4801-a1d3-87598ba13651-kube-api-access-kxz77\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:07 crc kubenswrapper[4605]: I1001 14:29:07.131296 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:29:07 crc kubenswrapper[4605]: I1001 14:29:07.663066 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l"] Oct 01 14:29:07 crc kubenswrapper[4605]: I1001 14:29:07.676220 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:29:08 crc kubenswrapper[4605]: I1001 14:29:08.624139 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" event={"ID":"58600359-0fa8-4801-a1d3-87598ba13651","Type":"ContainerStarted","Data":"d1ad867854729efc999e2cac699d410e749f09b74e474857bf5c033580d43ddc"} Oct 01 14:29:08 crc kubenswrapper[4605]: I1001 14:29:08.625334 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" event={"ID":"58600359-0fa8-4801-a1d3-87598ba13651","Type":"ContainerStarted","Data":"aac97b40718047f143ea2a7beb845ece94aadd554ba873d1ce7a40983547155b"} Oct 01 14:29:08 crc kubenswrapper[4605]: I1001 14:29:08.665417 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" podStartSLOduration=2.257326617 podStartE2EDuration="2.665394623s" podCreationTimestamp="2025-10-01 14:29:06 +0000 UTC" firstStartedPulling="2025-10-01 14:29:07.675983971 +0000 UTC m=+2670.419960169" lastFinishedPulling="2025-10-01 14:29:08.084051967 +0000 UTC m=+2670.828028175" observedRunningTime="2025-10-01 14:29:08.653630637 +0000 UTC m=+2671.397606845" watchObservedRunningTime="2025-10-01 14:29:08.665394623 +0000 UTC m=+2671.409370831" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.389170 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sp4wt"] Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.392201 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.411761 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sp4wt"] Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.462776 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk4p7\" (UniqueName: \"kubernetes.io/projected/1aee75b7-664c-423d-91f8-7cb48ab1273e-kube-api-access-mk4p7\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.463161 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-catalog-content\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.463337 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-utilities\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.565137 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-utilities\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.565215 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk4p7\" (UniqueName: \"kubernetes.io/projected/1aee75b7-664c-423d-91f8-7cb48ab1273e-kube-api-access-mk4p7\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.565378 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-catalog-content\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.565598 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-utilities\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.565918 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-catalog-content\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.600209 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mk4p7\" (UniqueName: \"kubernetes.io/projected/1aee75b7-664c-423d-91f8-7cb48ab1273e-kube-api-access-mk4p7\") pod \"redhat-marketplace-sp4wt\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:53 crc kubenswrapper[4605]: I1001 14:29:53.714957 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:29:54 crc kubenswrapper[4605]: I1001 14:29:54.197932 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sp4wt"] Oct 01 14:29:55 crc kubenswrapper[4605]: I1001 14:29:55.015900 4605 generic.go:334] "Generic (PLEG): container finished" podID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerID="ae431058900d27708593c1b68ee1014b9b39a56fe517d40d5240bf315a99adab" exitCode=0 Oct 01 14:29:55 crc kubenswrapper[4605]: I1001 14:29:55.016028 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerDied","Data":"ae431058900d27708593c1b68ee1014b9b39a56fe517d40d5240bf315a99adab"} Oct 01 14:29:55 crc kubenswrapper[4605]: I1001 14:29:55.016253 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerStarted","Data":"e360775c3863184bee86b1f0606c77b3d5b730788cb4227bdeb57d9a0349bfad"} Oct 01 14:29:56 crc kubenswrapper[4605]: I1001 14:29:56.026001 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerStarted","Data":"1e90079f187368c6ac7974702b3ce658f88e6528943bf4bf4cde0ddeb79b36b0"} Oct 01 14:29:57 crc kubenswrapper[4605]: I1001 14:29:57.036201 4605 generic.go:334] "Generic (PLEG): container finished" podID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerID="1e90079f187368c6ac7974702b3ce658f88e6528943bf4bf4cde0ddeb79b36b0" exitCode=0 Oct 01 14:29:57 crc kubenswrapper[4605]: I1001 14:29:57.036304 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerDied","Data":"1e90079f187368c6ac7974702b3ce658f88e6528943bf4bf4cde0ddeb79b36b0"} Oct 01 14:29:58 crc kubenswrapper[4605]: I1001 14:29:58.046469 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerStarted","Data":"9f57f3c380e105d4abe29e3852127cd309dedf06b50d92502f8b08e3847533e9"} Oct 01 14:29:58 crc kubenswrapper[4605]: I1001 14:29:58.072309 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sp4wt" podStartSLOduration=2.37957555 podStartE2EDuration="5.072287345s" podCreationTimestamp="2025-10-01 14:29:53 +0000 UTC" firstStartedPulling="2025-10-01 14:29:55.017959182 +0000 UTC m=+2717.761935390" lastFinishedPulling="2025-10-01 14:29:57.710670977 +0000 UTC m=+2720.454647185" observedRunningTime="2025-10-01 14:29:58.066449258 +0000 UTC m=+2720.810425466" watchObservedRunningTime="2025-10-01 14:29:58.072287345 +0000 UTC m=+2720.816263553" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.183478 4605 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"] Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.185347 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.187202 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.187722 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.192371 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"] Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.312847 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkcwl\" (UniqueName: \"kubernetes.io/projected/fa48ae67-306f-4dfd-8b09-d633aa648286-kube-api-access-fkcwl\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.313146 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa48ae67-306f-4dfd-8b09-d633aa648286-secret-volume\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.313333 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa48ae67-306f-4dfd-8b09-d633aa648286-config-volume\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.414856 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa48ae67-306f-4dfd-8b09-d633aa648286-secret-volume\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.414936 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa48ae67-306f-4dfd-8b09-d633aa648286-config-volume\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.414980 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkcwl\" (UniqueName: \"kubernetes.io/projected/fa48ae67-306f-4dfd-8b09-d633aa648286-kube-api-access-fkcwl\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.416233 
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.420662 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa48ae67-306f-4dfd-8b09-d633aa648286-secret-volume\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.432159 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkcwl\" (UniqueName: \"kubernetes.io/projected/fa48ae67-306f-4dfd-8b09-d633aa648286-kube-api-access-fkcwl\") pod \"collect-profiles-29322150-8v6r7\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.520045 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.566219 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fnhfb"]
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.568244 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.593218 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fnhfb"]
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.622161 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-catalog-content\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.622310 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrkf8\" (UniqueName: \"kubernetes.io/projected/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-kube-api-access-xrkf8\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.622498 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-utilities\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.723782 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrkf8\" (UniqueName: \"kubernetes.io/projected/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-kube-api-access-xrkf8\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.724193 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-utilities\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.724286 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-catalog-content\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.724798 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-catalog-content\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.724854 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-utilities\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.747538 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrkf8\" (UniqueName: \"kubernetes.io/projected/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-kube-api-access-xrkf8\") pod \"redhat-operators-fnhfb\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:00 crc kubenswrapper[4605]: I1001 14:30:00.961205 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fnhfb"
Oct 01 14:30:01 crc kubenswrapper[4605]: I1001 14:30:01.014212 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"]
Oct 01 14:30:01 crc kubenswrapper[4605]: W1001 14:30:01.027071 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa48ae67_306f_4dfd_8b09_d633aa648286.slice/crio-8dc0f13f755b12a9ae95b1f18a10bf7b2a485f5045256b511a61e5f4d65c7f0e WatchSource:0}: Error finding container 8dc0f13f755b12a9ae95b1f18a10bf7b2a485f5045256b511a61e5f4d65c7f0e: Status 404 returned error can't find the container with id 8dc0f13f755b12a9ae95b1f18a10bf7b2a485f5045256b511a61e5f4d65c7f0e
Oct 01 14:30:01 crc kubenswrapper[4605]: I1001 14:30:01.087631 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" event={"ID":"fa48ae67-306f-4dfd-8b09-d633aa648286","Type":"ContainerStarted","Data":"8dc0f13f755b12a9ae95b1f18a10bf7b2a485f5045256b511a61e5f4d65c7f0e"}
Oct 01 14:30:01 crc kubenswrapper[4605]: I1001 14:30:01.269627 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fnhfb"]
Oct 01 14:30:02 crc kubenswrapper[4605]: I1001 14:30:02.098772 4605 generic.go:334] "Generic (PLEG): container finished" podID="fa48ae67-306f-4dfd-8b09-d633aa648286" containerID="ca8f6d64dbca21cc4cf9cbd8973ee3dfb691dda08dca331eacb8934856cb0036" exitCode=0
Oct 01 14:30:02 crc kubenswrapper[4605]: I1001 14:30:02.098916 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" event={"ID":"fa48ae67-306f-4dfd-8b09-d633aa648286","Type":"ContainerDied","Data":"ca8f6d64dbca21cc4cf9cbd8973ee3dfb691dda08dca331eacb8934856cb0036"}
Oct 01 14:30:02 crc kubenswrapper[4605]: I1001 14:30:02.100302 4605 generic.go:334] "Generic (PLEG): container finished" podID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerID="e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3" exitCode=0
Oct 01 14:30:02 crc kubenswrapper[4605]: I1001 14:30:02.100331 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerDied","Data":"e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3"}
Oct 01 14:30:02 crc kubenswrapper[4605]: I1001 14:30:02.100347 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerStarted","Data":"2a1176dc8d0f2f643e929627c9ae8f6195bb432e5b2bc41b68b80966db901a52"}
Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.110321 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerStarted","Data":"1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4"}
Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.492539 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.582387 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa48ae67-306f-4dfd-8b09-d633aa648286-config-volume\") pod \"fa48ae67-306f-4dfd-8b09-d633aa648286\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.582737 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa48ae67-306f-4dfd-8b09-d633aa648286-secret-volume\") pod \"fa48ae67-306f-4dfd-8b09-d633aa648286\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.583540 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkcwl\" (UniqueName: \"kubernetes.io/projected/fa48ae67-306f-4dfd-8b09-d633aa648286-kube-api-access-fkcwl\") pod \"fa48ae67-306f-4dfd-8b09-d633aa648286\" (UID: \"fa48ae67-306f-4dfd-8b09-d633aa648286\") " Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.582995 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa48ae67-306f-4dfd-8b09-d633aa648286-config-volume" (OuterVolumeSpecName: "config-volume") pod "fa48ae67-306f-4dfd-8b09-d633aa648286" (UID: "fa48ae67-306f-4dfd-8b09-d633aa648286"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.584083 4605 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa48ae67-306f-4dfd-8b09-d633aa648286-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.588744 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa48ae67-306f-4dfd-8b09-d633aa648286-kube-api-access-fkcwl" (OuterVolumeSpecName: "kube-api-access-fkcwl") pod "fa48ae67-306f-4dfd-8b09-d633aa648286" (UID: "fa48ae67-306f-4dfd-8b09-d633aa648286"). InnerVolumeSpecName "kube-api-access-fkcwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.589044 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa48ae67-306f-4dfd-8b09-d633aa648286-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fa48ae67-306f-4dfd-8b09-d633aa648286" (UID: "fa48ae67-306f-4dfd-8b09-d633aa648286"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.685639 4605 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa48ae67-306f-4dfd-8b09-d633aa648286-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.685674 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkcwl\" (UniqueName: \"kubernetes.io/projected/fa48ae67-306f-4dfd-8b09-d633aa648286-kube-api-access-fkcwl\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.715691 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.716337 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:30:03 crc kubenswrapper[4605]: I1001 14:30:03.762805 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:30:04 crc kubenswrapper[4605]: I1001 14:30:04.120650 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" Oct 01 14:30:04 crc kubenswrapper[4605]: I1001 14:30:04.120794 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322150-8v6r7" event={"ID":"fa48ae67-306f-4dfd-8b09-d633aa648286","Type":"ContainerDied","Data":"8dc0f13f755b12a9ae95b1f18a10bf7b2a485f5045256b511a61e5f4d65c7f0e"} Oct 01 14:30:04 crc kubenswrapper[4605]: I1001 14:30:04.120960 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8dc0f13f755b12a9ae95b1f18a10bf7b2a485f5045256b511a61e5f4d65c7f0e" Oct 01 14:30:04 crc kubenswrapper[4605]: I1001 14:30:04.177790 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:30:04 crc kubenswrapper[4605]: I1001 14:30:04.570314 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"] Oct 01 14:30:04 crc kubenswrapper[4605]: I1001 14:30:04.576970 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322105-br8qr"] Oct 01 14:30:05 crc kubenswrapper[4605]: I1001 14:30:05.943005 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea3e40db-79b6-4499-b7e6-71bd46c55663" path="/var/lib/kubelet/pods/ea3e40db-79b6-4499-b7e6-71bd46c55663/volumes" Oct 01 14:30:05 crc kubenswrapper[4605]: I1001 14:30:05.961326 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sp4wt"] Oct 01 14:30:06 crc kubenswrapper[4605]: I1001 14:30:06.143218 4605 generic.go:334] "Generic (PLEG): container finished" podID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerID="1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4" exitCode=0 Oct 01 14:30:06 crc kubenswrapper[4605]: I1001 14:30:06.144305 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerDied","Data":"1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4"} Oct 01 14:30:07 crc 
kubenswrapper[4605]: I1001 14:30:07.169901 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerStarted","Data":"31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad"} Oct 01 14:30:07 crc kubenswrapper[4605]: I1001 14:30:07.170105 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sp4wt" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="registry-server" containerID="cri-o://9f57f3c380e105d4abe29e3852127cd309dedf06b50d92502f8b08e3847533e9" gracePeriod=2 Oct 01 14:30:07 crc kubenswrapper[4605]: I1001 14:30:07.200840 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fnhfb" podStartSLOduration=2.721151884 podStartE2EDuration="7.200821807s" podCreationTimestamp="2025-10-01 14:30:00 +0000 UTC" firstStartedPulling="2025-10-01 14:30:02.101929445 +0000 UTC m=+2724.845905673" lastFinishedPulling="2025-10-01 14:30:06.581599388 +0000 UTC m=+2729.325575596" observedRunningTime="2025-10-01 14:30:07.189438441 +0000 UTC m=+2729.933414649" watchObservedRunningTime="2025-10-01 14:30:07.200821807 +0000 UTC m=+2729.944798015" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.179063 4605 generic.go:334] "Generic (PLEG): container finished" podID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerID="9f57f3c380e105d4abe29e3852127cd309dedf06b50d92502f8b08e3847533e9" exitCode=0 Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.179171 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerDied","Data":"9f57f3c380e105d4abe29e3852127cd309dedf06b50d92502f8b08e3847533e9"} Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.298954 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.490784 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk4p7\" (UniqueName: \"kubernetes.io/projected/1aee75b7-664c-423d-91f8-7cb48ab1273e-kube-api-access-mk4p7\") pod \"1aee75b7-664c-423d-91f8-7cb48ab1273e\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.491321 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-catalog-content\") pod \"1aee75b7-664c-423d-91f8-7cb48ab1273e\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.491420 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-utilities\") pod \"1aee75b7-664c-423d-91f8-7cb48ab1273e\" (UID: \"1aee75b7-664c-423d-91f8-7cb48ab1273e\") " Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.492461 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-utilities" (OuterVolumeSpecName: "utilities") pod "1aee75b7-664c-423d-91f8-7cb48ab1273e" (UID: "1aee75b7-664c-423d-91f8-7cb48ab1273e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.498943 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aee75b7-664c-423d-91f8-7cb48ab1273e-kube-api-access-mk4p7" (OuterVolumeSpecName: "kube-api-access-mk4p7") pod "1aee75b7-664c-423d-91f8-7cb48ab1273e" (UID: "1aee75b7-664c-423d-91f8-7cb48ab1273e"). InnerVolumeSpecName "kube-api-access-mk4p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.503623 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1aee75b7-664c-423d-91f8-7cb48ab1273e" (UID: "1aee75b7-664c-423d-91f8-7cb48ab1273e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.593845 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.593896 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk4p7\" (UniqueName: \"kubernetes.io/projected/1aee75b7-664c-423d-91f8-7cb48ab1273e-kube-api-access-mk4p7\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:08 crc kubenswrapper[4605]: I1001 14:30:08.593908 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aee75b7-664c-423d-91f8-7cb48ab1273e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.193635 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sp4wt" event={"ID":"1aee75b7-664c-423d-91f8-7cb48ab1273e","Type":"ContainerDied","Data":"e360775c3863184bee86b1f0606c77b3d5b730788cb4227bdeb57d9a0349bfad"} Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.193698 4605 scope.go:117] "RemoveContainer" containerID="9f57f3c380e105d4abe29e3852127cd309dedf06b50d92502f8b08e3847533e9" Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.193887 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sp4wt" Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.238189 4605 scope.go:117] "RemoveContainer" containerID="1e90079f187368c6ac7974702b3ce658f88e6528943bf4bf4cde0ddeb79b36b0" Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.242244 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sp4wt"] Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.252862 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sp4wt"] Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.275560 4605 scope.go:117] "RemoveContainer" containerID="ae431058900d27708593c1b68ee1014b9b39a56fe517d40d5240bf315a99adab" Oct 01 14:30:09 crc kubenswrapper[4605]: I1001 14:30:09.938022 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" path="/var/lib/kubelet/pods/1aee75b7-664c-423d-91f8-7cb48ab1273e/volumes" Oct 01 14:30:10 crc kubenswrapper[4605]: I1001 14:30:10.962347 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fnhfb" Oct 01 14:30:10 crc kubenswrapper[4605]: I1001 14:30:10.962401 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fnhfb" Oct 01 14:30:12 crc kubenswrapper[4605]: I1001 14:30:12.007525 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fnhfb" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="registry-server" probeResult="failure" output=< Oct 01 14:30:12 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:30:12 crc kubenswrapper[4605]: > Oct 01 14:30:21 crc kubenswrapper[4605]: I1001 14:30:21.032581 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fnhfb" Oct 01 14:30:21 crc kubenswrapper[4605]: I1001 14:30:21.093788 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fnhfb" Oct 01 14:30:21 crc kubenswrapper[4605]: I1001 14:30:21.278334 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fnhfb"] Oct 01 14:30:21 crc kubenswrapper[4605]: I1001 14:30:21.631119 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:30:21 crc kubenswrapper[4605]: I1001 14:30:21.631215 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.306357 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fnhfb" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="registry-server" containerID="cri-o://31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad" gracePeriod=2 Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.787837 4605 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fnhfb" Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.959293 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-utilities\") pod \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.959440 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-catalog-content\") pod \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.959511 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrkf8\" (UniqueName: \"kubernetes.io/projected/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-kube-api-access-xrkf8\") pod \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\" (UID: \"97eab765-715a-4f0c-ab6d-36ec1fe2aa40\") " Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.960188 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-utilities" (OuterVolumeSpecName: "utilities") pod "97eab765-715a-4f0c-ab6d-36ec1fe2aa40" (UID: "97eab765-715a-4f0c-ab6d-36ec1fe2aa40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:30:22 crc kubenswrapper[4605]: I1001 14:30:22.965559 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-kube-api-access-xrkf8" (OuterVolumeSpecName: "kube-api-access-xrkf8") pod "97eab765-715a-4f0c-ab6d-36ec1fe2aa40" (UID: "97eab765-715a-4f0c-ab6d-36ec1fe2aa40"). InnerVolumeSpecName "kube-api-access-xrkf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.050708 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97eab765-715a-4f0c-ab6d-36ec1fe2aa40" (UID: "97eab765-715a-4f0c-ab6d-36ec1fe2aa40"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.062072 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.062135 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.062151 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrkf8\" (UniqueName: \"kubernetes.io/projected/97eab765-715a-4f0c-ab6d-36ec1fe2aa40-kube-api-access-xrkf8\") on node \"crc\" DevicePath \"\"" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.319599 4605 generic.go:334] "Generic (PLEG): container finished" podID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerID="31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad" exitCode=0 Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.319669 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fnhfb" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.319661 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerDied","Data":"31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad"} Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.320163 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fnhfb" event={"ID":"97eab765-715a-4f0c-ab6d-36ec1fe2aa40","Type":"ContainerDied","Data":"2a1176dc8d0f2f643e929627c9ae8f6195bb432e5b2bc41b68b80966db901a52"} Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.320196 4605 scope.go:117] "RemoveContainer" containerID="31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.359837 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fnhfb"] Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.361847 4605 scope.go:117] "RemoveContainer" containerID="1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.369586 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fnhfb"] Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.406548 4605 scope.go:117] "RemoveContainer" containerID="e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.452881 4605 scope.go:117] "RemoveContainer" containerID="31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad" Oct 01 14:30:23 crc kubenswrapper[4605]: E1001 14:30:23.453368 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad\": container with ID starting with 31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad not found: ID does not exist" containerID="31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.453411 4605 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad"} err="failed to get container status \"31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad\": rpc error: code = NotFound desc = could not find container \"31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad\": container with ID starting with 31fac9e6d2922767cc71105c7d171b1c962413ac6fb1cb7a7d6da39e543818ad not found: ID does not exist" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.453439 4605 scope.go:117] "RemoveContainer" containerID="1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4" Oct 01 14:30:23 crc kubenswrapper[4605]: E1001 14:30:23.458519 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4\": container with ID starting with 1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4 not found: ID does not exist" containerID="1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.458557 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4"} err="failed to get container status \"1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4\": rpc error: code = NotFound desc = could not find container \"1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4\": container with ID starting with 1870577a6d599ef2d3daeea1d279ad77d93bf7c5eee2793857441055a6a403f4 not found: ID does not exist" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.458586 4605 scope.go:117] "RemoveContainer" containerID="e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3" Oct 01 14:30:23 crc kubenswrapper[4605]: E1001 14:30:23.459897 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3\": container with ID starting with e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3 not found: ID does not exist" containerID="e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.459936 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3"} err="failed to get container status \"e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3\": rpc error: code = NotFound desc = could not find container \"e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3\": container with ID starting with e1cdfa8768a3c8f8d9c22f429c165c5b915127edc1a6765010f804e64a03d3c3 not found: ID does not exist" Oct 01 14:30:23 crc kubenswrapper[4605]: I1001 14:30:23.937058 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" path="/var/lib/kubelet/pods/97eab765-715a-4f0c-ab6d-36ec1fe2aa40/volumes" Oct 01 14:30:51 crc kubenswrapper[4605]: I1001 14:30:51.631494 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:30:51 crc kubenswrapper[4605]: I1001 14:30:51.632035 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:30:51 crc kubenswrapper[4605]: I1001 14:30:51.808854 4605 scope.go:117] "RemoveContainer" containerID="7541ddd5837ac92c9e1d93dfba42f821bf4ae79da8f3d4c3a1e40a4700fbd9e8" Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.631017 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.632813 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.632964 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.633813 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b12842fc21dc955a6e9c6b86d51ef221646a4c444d7a262b1bedd2def659168"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.633879 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://2b12842fc21dc955a6e9c6b86d51ef221646a4c444d7a262b1bedd2def659168" gracePeriod=600 Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.854348 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="2b12842fc21dc955a6e9c6b86d51ef221646a4c444d7a262b1bedd2def659168" exitCode=0 Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.854393 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"2b12842fc21dc955a6e9c6b86d51ef221646a4c444d7a262b1bedd2def659168"} Oct 01 14:31:21 crc kubenswrapper[4605]: I1001 14:31:21.854424 4605 scope.go:117] "RemoveContainer" containerID="67fab076341529dea5f6186b116af7d19b7aea3960811da63be1260baa5a897e" Oct 01 14:31:22 crc kubenswrapper[4605]: I1001 14:31:22.866664 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"} Oct 01 14:32:08 crc 
kubenswrapper[4605]: I1001 14:32:08.263792 4605 generic.go:334] "Generic (PLEG): container finished" podID="58600359-0fa8-4801-a1d3-87598ba13651" containerID="d1ad867854729efc999e2cac699d410e749f09b74e474857bf5c033580d43ddc" exitCode=0 Oct 01 14:32:08 crc kubenswrapper[4605]: I1001 14:32:08.263964 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" event={"ID":"58600359-0fa8-4801-a1d3-87598ba13651","Type":"ContainerDied","Data":"d1ad867854729efc999e2cac699d410e749f09b74e474857bf5c033580d43ddc"} Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.691779 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735101 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-telemetry-combined-ca-bundle\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735185 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-1\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735307 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ssh-key\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735361 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxz77\" (UniqueName: \"kubernetes.io/projected/58600359-0fa8-4801-a1d3-87598ba13651-kube-api-access-kxz77\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735391 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-inventory\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735412 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-0\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.735467 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-2\") pod \"58600359-0fa8-4801-a1d3-87598ba13651\" (UID: \"58600359-0fa8-4801-a1d3-87598ba13651\") " Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.740909 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/58600359-0fa8-4801-a1d3-87598ba13651-kube-api-access-kxz77" (OuterVolumeSpecName: "kube-api-access-kxz77") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "kube-api-access-kxz77". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.769300 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.769566 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.772377 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.781927 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.785618 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.787688 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-inventory" (OuterVolumeSpecName: "inventory") pod "58600359-0fa8-4801-a1d3-87598ba13651" (UID: "58600359-0fa8-4801-a1d3-87598ba13651"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838068 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxz77\" (UniqueName: \"kubernetes.io/projected/58600359-0fa8-4801-a1d3-87598ba13651-kube-api-access-kxz77\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838123 4605 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-inventory\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838138 4605 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838152 4605 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838164 4605 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838175 4605 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:09 crc kubenswrapper[4605]: I1001 14:32:09.838185 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/58600359-0fa8-4801-a1d3-87598ba13651-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:32:10 crc kubenswrapper[4605]: I1001 14:32:10.281956 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" event={"ID":"58600359-0fa8-4801-a1d3-87598ba13651","Type":"ContainerDied","Data":"aac97b40718047f143ea2a7beb845ece94aadd554ba873d1ce7a40983547155b"} Oct 01 14:32:10 crc kubenswrapper[4605]: I1001 14:32:10.282400 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aac97b40718047f143ea2a7beb845ece94aadd554ba873d1ce7a40983547155b" Oct 01 14:32:10 crc kubenswrapper[4605]: I1001 14:32:10.282231 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.586428 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587253 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="registry-server" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587265 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="registry-server" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587282 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="extract-content" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587288 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="extract-content" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587304 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa48ae67-306f-4dfd-8b09-d633aa648286" containerName="collect-profiles" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587310 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa48ae67-306f-4dfd-8b09-d633aa648286" containerName="collect-profiles" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587334 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58600359-0fa8-4801-a1d3-87598ba13651" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587341 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="58600359-0fa8-4801-a1d3-87598ba13651" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587350 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="extract-content" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587355 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="extract-content" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587369 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="extract-utilities" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587375 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="extract-utilities" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587384 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="extract-utilities" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587389 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="extract-utilities" Oct 01 14:33:12 crc kubenswrapper[4605]: E1001 14:33:12.587398 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="registry-server" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587404 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="registry-server" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 
14:33:12.587571 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="58600359-0fa8-4801-a1d3-87598ba13651" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587589 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="1aee75b7-664c-423d-91f8-7cb48ab1273e" containerName="registry-server" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587601 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa48ae67-306f-4dfd-8b09-d633aa648286" containerName="collect-profiles" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.587618 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="97eab765-715a-4f0c-ab6d-36ec1fe2aa40" containerName="registry-server" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.588588 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.590657 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.590709 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-dz8dz" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.591260 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.591966 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.612199 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.716905 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-config-data\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.717225 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.717335 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.717411 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt87d\" (UniqueName: \"kubernetes.io/projected/2de51de5-4325-49f8-9179-f18e4de5fd46-kube-api-access-bt87d\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 
14:33:12.717524 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.717635 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.717775 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.717896 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.718058 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819369 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-config-data\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819421 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819446 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819463 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt87d\" (UniqueName: \"kubernetes.io/projected/2de51de5-4325-49f8-9179-f18e4de5fd46-kube-api-access-bt87d\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819487 4605 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819518 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819562 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819598 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.819620 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.820123 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.820433 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.821067 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-config-data\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.822006 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.826240 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.826635 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.826987 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.832299 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.842776 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt87d\" (UniqueName: \"kubernetes.io/projected/2de51de5-4325-49f8-9179-f18e4de5fd46-kube-api-access-bt87d\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.853593 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " pod="openstack/tempest-tests-tempest" Oct 01 14:33:12 crc kubenswrapper[4605]: I1001 14:33:12.905861 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 14:33:13 crc kubenswrapper[4605]: W1001 14:33:13.353558 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2de51de5_4325_49f8_9179_f18e4de5fd46.slice/crio-d6657e44c1d5f2cd9f61667290c34f092435b1198c3881d6e541540fd3eb1f27 WatchSource:0}: Error finding container d6657e44c1d5f2cd9f61667290c34f092435b1198c3881d6e541540fd3eb1f27: Status 404 returned error can't find the container with id d6657e44c1d5f2cd9f61667290c34f092435b1198c3881d6e541540fd3eb1f27 Oct 01 14:33:13 crc kubenswrapper[4605]: I1001 14:33:13.355154 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 01 14:33:13 crc kubenswrapper[4605]: I1001 14:33:13.870698 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2de51de5-4325-49f8-9179-f18e4de5fd46","Type":"ContainerStarted","Data":"d6657e44c1d5f2cd9f61667290c34f092435b1198c3881d6e541540fd3eb1f27"} Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.426448 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n8ltq"] Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.431039 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.463801 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n8ltq"] Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.489520 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5crtv\" (UniqueName: \"kubernetes.io/projected/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-kube-api-access-5crtv\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.489622 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-catalog-content\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.489726 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-utilities\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.591372 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-utilities\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.591443 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5crtv\" (UniqueName: \"kubernetes.io/projected/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-kube-api-access-5crtv\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.591537 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-catalog-content\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.591891 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-utilities\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.592049 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-catalog-content\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.615485 4605 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5crtv\" (UniqueName: \"kubernetes.io/projected/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-kube-api-access-5crtv\") pod \"community-operators-n8ltq\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") " pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:16 crc kubenswrapper[4605]: I1001 14:33:16.786544 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:19 crc kubenswrapper[4605]: I1001 14:33:19.409894 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n8ltq"] Oct 01 14:33:19 crc kubenswrapper[4605]: I1001 14:33:19.955889 4605 generic.go:334] "Generic (PLEG): container finished" podID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerID="0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9" exitCode=0 Oct 01 14:33:19 crc kubenswrapper[4605]: I1001 14:33:19.955981 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerDied","Data":"0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9"} Oct 01 14:33:19 crc kubenswrapper[4605]: I1001 14:33:19.956203 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerStarted","Data":"5f341b4181e8eb5a3a28b7647568318f52aec5fd5bfc70267b3e72a5413e4cd6"} Oct 01 14:33:21 crc kubenswrapper[4605]: I1001 14:33:21.631309 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:33:21 crc kubenswrapper[4605]: I1001 14:33:21.631715 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:33:21 crc kubenswrapper[4605]: I1001 14:33:21.974967 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerStarted","Data":"b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11"} Oct 01 14:33:23 crc kubenswrapper[4605]: I1001 14:33:23.997606 4605 generic.go:334] "Generic (PLEG): container finished" podID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerID="b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11" exitCode=0 Oct 01 14:33:23 crc kubenswrapper[4605]: I1001 14:33:23.997705 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerDied","Data":"b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11"} Oct 01 14:33:50 crc kubenswrapper[4605]: E1001 14:33:50.359065 4605 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 01 14:33:50 crc kubenswrapper[4605]: E1001 14:33:50.362283 
4605 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bt87d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(2de51de5-4325-49f8-9179-f18e4de5fd46): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 01 14:33:50 crc kubenswrapper[4605]: E1001 14:33:50.363576 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/tempest-tests-tempest" podUID="2de51de5-4325-49f8-9179-f18e4de5fd46" Oct 01 14:33:51 crc kubenswrapper[4605]: I1001 14:33:51.287576 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerStarted","Data":"3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc"} Oct 01 14:33:51 crc kubenswrapper[4605]: E1001 14:33:51.288688 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="2de51de5-4325-49f8-9179-f18e4de5fd46" Oct 01 14:33:51 crc kubenswrapper[4605]: I1001 14:33:51.335758 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n8ltq" podStartSLOduration=5.017624769 podStartE2EDuration="35.335739511s" podCreationTimestamp="2025-10-01 14:33:16 +0000 UTC" firstStartedPulling="2025-10-01 14:33:19.958038581 +0000 UTC m=+2922.702014789" lastFinishedPulling="2025-10-01 14:33:50.276153283 +0000 UTC m=+2953.020129531" observedRunningTime="2025-10-01 14:33:51.333182017 +0000 UTC m=+2954.077158245" watchObservedRunningTime="2025-10-01 14:33:51.335739511 +0000 UTC m=+2954.079715719" Oct 01 14:33:51 crc kubenswrapper[4605]: I1001 14:33:51.630790 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:33:51 crc kubenswrapper[4605]: I1001 14:33:51.630850 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:33:56 crc kubenswrapper[4605]: I1001 14:33:56.787117 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:56 crc kubenswrapper[4605]: I1001 14:33:56.787683 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:33:57 crc kubenswrapper[4605]: I1001 14:33:57.833932 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-n8ltq" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="registry-server" probeResult="failure" output=< Oct 01 14:33:57 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:33:57 crc kubenswrapper[4605]: > Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.679147 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qrsw7"] Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.681691 4605 util.go:30] "No sandbox for pod can be found. 
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.697483 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrsw7"]
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.793398 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-utilities\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.793507 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-catalog-content\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.793603 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfg2k\" (UniqueName: \"kubernetes.io/projected/a3ab2920-c567-4058-8b11-eefe7839f8be-kube-api-access-wfg2k\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.895235 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-utilities\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.895616 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-catalog-content\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.895824 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-utilities\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.895943 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfg2k\" (UniqueName: \"kubernetes.io/projected/a3ab2920-c567-4058-8b11-eefe7839f8be-kube-api-access-wfg2k\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.896184 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-catalog-content\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:01 crc kubenswrapper[4605]: I1001 14:34:01.932983 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfg2k\" (UniqueName: \"kubernetes.io/projected/a3ab2920-c567-4058-8b11-eefe7839f8be-kube-api-access-wfg2k\") pod \"certified-operators-qrsw7\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:02 crc kubenswrapper[4605]: I1001 14:34:02.017719 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrsw7"
Oct 01 14:34:02 crc kubenswrapper[4605]: I1001 14:34:02.536545 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrsw7"]
Oct 01 14:34:03 crc kubenswrapper[4605]: I1001 14:34:03.385591 4605 generic.go:334] "Generic (PLEG): container finished" podID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerID="e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976" exitCode=0
Oct 01 14:34:03 crc kubenswrapper[4605]: I1001 14:34:03.385723 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerDied","Data":"e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976"}
Oct 01 14:34:03 crc kubenswrapper[4605]: I1001 14:34:03.385914 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerStarted","Data":"2b7bc71c2251d46628668b874410338ecec4eb50d6fe2efb08a5c6ff2903991e"}
Oct 01 14:34:03 crc kubenswrapper[4605]: I1001 14:34:03.477747 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 01 14:34:04 crc kubenswrapper[4605]: I1001 14:34:04.400348 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerStarted","Data":"8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9"}
Oct 01 14:34:05 crc kubenswrapper[4605]: I1001 14:34:05.413034 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2de51de5-4325-49f8-9179-f18e4de5fd46","Type":"ContainerStarted","Data":"f4f06a811e54fd683e04effb7ea99e6e5cf2f7d720d666a1778ad4a875b25bf4"}
Oct 01 14:34:05 crc kubenswrapper[4605]: I1001 14:34:05.443627 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.325660466 podStartE2EDuration="54.443604396s" podCreationTimestamp="2025-10-01 14:33:11 +0000 UTC" firstStartedPulling="2025-10-01 14:33:13.356037213 +0000 UTC m=+2916.100013421" lastFinishedPulling="2025-10-01 14:34:03.473981143 +0000 UTC m=+2966.217957351" observedRunningTime="2025-10-01 14:34:05.438459847 +0000 UTC m=+2968.182436055" watchObservedRunningTime="2025-10-01 14:34:05.443604396 +0000 UTC m=+2968.187580604"
Oct 01 14:34:06 crc kubenswrapper[4605]: I1001 14:34:06.427861 4605 generic.go:334] "Generic (PLEG): container finished" podID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerID="8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9" exitCode=0
Oct 01 14:34:06 crc kubenswrapper[4605]: I1001 14:34:06.428387 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerDied","Data":"8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9"}
Oct 01 14:34:06 crc kubenswrapper[4605]: I1001 14:34:06.831335 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n8ltq"
Oct 01 14:34:06 crc kubenswrapper[4605]: I1001 14:34:06.875574 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n8ltq"
Oct 01 14:34:07 crc kubenswrapper[4605]: I1001 14:34:07.441475 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerStarted","Data":"311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b"}
Oct 01 14:34:07 crc kubenswrapper[4605]: I1001 14:34:07.464610 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qrsw7" podStartSLOduration=2.937242559 podStartE2EDuration="6.464586172s" podCreationTimestamp="2025-10-01 14:34:01 +0000 UTC" firstStartedPulling="2025-10-01 14:34:03.387225981 +0000 UTC m=+2966.131202189" lastFinishedPulling="2025-10-01 14:34:06.914569594 +0000 UTC m=+2969.658545802" observedRunningTime="2025-10-01 14:34:07.461401692 +0000 UTC m=+2970.205377910" watchObservedRunningTime="2025-10-01 14:34:07.464586172 +0000 UTC m=+2970.208562380"
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.456703 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n8ltq"]
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.456942 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n8ltq" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="registry-server" containerID="cri-o://3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc" gracePeriod=2
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.925335 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8ltq"
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.974683 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5crtv\" (UniqueName: \"kubernetes.io/projected/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-kube-api-access-5crtv\") pod \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") "
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.974910 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-catalog-content\") pod \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") "
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.974996 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-utilities\") pod \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\" (UID: \"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e\") "
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.980732 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-utilities" (OuterVolumeSpecName: "utilities") pod "f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" (UID: "f8a60278-f4f7-4b95-90b4-99a70e1d6a2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:34:08 crc kubenswrapper[4605]: I1001 14:34:08.981068 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-kube-api-access-5crtv" (OuterVolumeSpecName: "kube-api-access-5crtv") pod "f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" (UID: "f8a60278-f4f7-4b95-90b4-99a70e1d6a2e"). InnerVolumeSpecName "kube-api-access-5crtv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.021870 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" (UID: "f8a60278-f4f7-4b95-90b4-99a70e1d6a2e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.077944 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5crtv\" (UniqueName: \"kubernetes.io/projected/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-kube-api-access-5crtv\") on node \"crc\" DevicePath \"\"" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.078220 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.078291 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.467950 4605 generic.go:334] "Generic (PLEG): container finished" podID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerID="3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc" exitCode=0 Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.468009 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerDied","Data":"3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc"} Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.468024 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8ltq" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.468059 4605 scope.go:117] "RemoveContainer" containerID="3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.468036 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8ltq" event={"ID":"f8a60278-f4f7-4b95-90b4-99a70e1d6a2e","Type":"ContainerDied","Data":"5f341b4181e8eb5a3a28b7647568318f52aec5fd5bfc70267b3e72a5413e4cd6"} Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.503324 4605 scope.go:117] "RemoveContainer" containerID="b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.508765 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n8ltq"] Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.520358 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n8ltq"] Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.539636 4605 scope.go:117] "RemoveContainer" containerID="0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.585975 4605 scope.go:117] "RemoveContainer" containerID="3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc" Oct 01 14:34:09 crc kubenswrapper[4605]: E1001 14:34:09.588048 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc\": container with ID starting with 3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc not found: ID does not exist" containerID="3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.588114 
4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc"} err="failed to get container status \"3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc\": rpc error: code = NotFound desc = could not find container \"3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc\": container with ID starting with 3ab37173bc4eace60cace8f96efa4161dc6af8f0c5d53a582c8564f4b2fcdabc not found: ID does not exist" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.588149 4605 scope.go:117] "RemoveContainer" containerID="b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11" Oct 01 14:34:09 crc kubenswrapper[4605]: E1001 14:34:09.588588 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11\": container with ID starting with b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11 not found: ID does not exist" containerID="b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.588630 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11"} err="failed to get container status \"b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11\": rpc error: code = NotFound desc = could not find container \"b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11\": container with ID starting with b30486a0bd21609c5f2a5b866adffd5bda7ddf5f96198999ec02918747ee4c11 not found: ID does not exist" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.588660 4605 scope.go:117] "RemoveContainer" containerID="0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9" Oct 01 14:34:09 crc kubenswrapper[4605]: E1001 14:34:09.589003 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9\": container with ID starting with 0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9 not found: ID does not exist" containerID="0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.589045 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9"} err="failed to get container status \"0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9\": rpc error: code = NotFound desc = could not find container \"0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9\": container with ID starting with 0d7e96b6a66335581462301761d8d2cff9cdb0f02957b70eedcc7e19b27917c9 not found: ID does not exist" Oct 01 14:34:09 crc kubenswrapper[4605]: I1001 14:34:09.942050 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" path="/var/lib/kubelet/pods/f8a60278-f4f7-4b95-90b4-99a70e1d6a2e/volumes" Oct 01 14:34:12 crc kubenswrapper[4605]: I1001 14:34:12.018051 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qrsw7" Oct 01 14:34:12 crc kubenswrapper[4605]: I1001 14:34:12.019497 4605 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-qrsw7" Oct 01 14:34:12 crc kubenswrapper[4605]: I1001 14:34:12.067000 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qrsw7" Oct 01 14:34:12 crc kubenswrapper[4605]: I1001 14:34:12.575298 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qrsw7" Oct 01 14:34:13 crc kubenswrapper[4605]: I1001 14:34:13.245879 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrsw7"] Oct 01 14:34:14 crc kubenswrapper[4605]: I1001 14:34:14.522808 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qrsw7" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="registry-server" containerID="cri-o://311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b" gracePeriod=2 Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.080490 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrsw7" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.194144 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfg2k\" (UniqueName: \"kubernetes.io/projected/a3ab2920-c567-4058-8b11-eefe7839f8be-kube-api-access-wfg2k\") pod \"a3ab2920-c567-4058-8b11-eefe7839f8be\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.194270 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-catalog-content\") pod \"a3ab2920-c567-4058-8b11-eefe7839f8be\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.194393 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-utilities\") pod \"a3ab2920-c567-4058-8b11-eefe7839f8be\" (UID: \"a3ab2920-c567-4058-8b11-eefe7839f8be\") " Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.195187 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-utilities" (OuterVolumeSpecName: "utilities") pod "a3ab2920-c567-4058-8b11-eefe7839f8be" (UID: "a3ab2920-c567-4058-8b11-eefe7839f8be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.203359 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3ab2920-c567-4058-8b11-eefe7839f8be-kube-api-access-wfg2k" (OuterVolumeSpecName: "kube-api-access-wfg2k") pod "a3ab2920-c567-4058-8b11-eefe7839f8be" (UID: "a3ab2920-c567-4058-8b11-eefe7839f8be"). InnerVolumeSpecName "kube-api-access-wfg2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.242189 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3ab2920-c567-4058-8b11-eefe7839f8be" (UID: "a3ab2920-c567-4058-8b11-eefe7839f8be"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.297056 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.297112 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfg2k\" (UniqueName: \"kubernetes.io/projected/a3ab2920-c567-4058-8b11-eefe7839f8be-kube-api-access-wfg2k\") on node \"crc\" DevicePath \"\"" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.297128 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3ab2920-c567-4058-8b11-eefe7839f8be-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.533755 4605 generic.go:334] "Generic (PLEG): container finished" podID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerID="311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b" exitCode=0 Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.533799 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerDied","Data":"311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b"} Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.533815 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrsw7" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.533827 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrsw7" event={"ID":"a3ab2920-c567-4058-8b11-eefe7839f8be","Type":"ContainerDied","Data":"2b7bc71c2251d46628668b874410338ecec4eb50d6fe2efb08a5c6ff2903991e"} Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.533845 4605 scope.go:117] "RemoveContainer" containerID="311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.563718 4605 scope.go:117] "RemoveContainer" containerID="8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.570810 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrsw7"] Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.579762 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qrsw7"] Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.590902 4605 scope.go:117] "RemoveContainer" containerID="e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.633144 4605 scope.go:117] "RemoveContainer" containerID="311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b" Oct 01 14:34:15 crc kubenswrapper[4605]: E1001 14:34:15.633830 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b\": container with ID starting with 311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b not found: ID does not exist" containerID="311f8a959e7e259db6f31bc326994122c990ac636301dab6c47d8ee7dcfe5b7b" Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.634041 
Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.634162 4605 scope.go:117] "RemoveContainer" containerID="8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9"
Oct 01 14:34:15 crc kubenswrapper[4605]: E1001 14:34:15.634827 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9\": container with ID starting with 8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9 not found: ID does not exist" containerID="8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9"
Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.634901 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9"} err="failed to get container status \"8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9\": rpc error: code = NotFound desc = could not find container \"8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9\": container with ID starting with 8c601107e04fb3d5124cf699d44421536ab44446dd966df597cd64f459ae45d9 not found: ID does not exist"
Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.634935 4605 scope.go:117] "RemoveContainer" containerID="e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976"
Oct 01 14:34:15 crc kubenswrapper[4605]: E1001 14:34:15.635431 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976\": container with ID starting with e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976 not found: ID does not exist" containerID="e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976"
Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.635467 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976"} err="failed to get container status \"e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976\": rpc error: code = NotFound desc = could not find container \"e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976\": container with ID starting with e501079c09a7da7e89d925ba87b4700a21e85211b60effead15b6e31f01a2976 not found: ID does not exist"
Oct 01 14:34:15 crc kubenswrapper[4605]: I1001 14:34:15.937337 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" path="/var/lib/kubelet/pods/a3ab2920-c567-4058-8b11-eefe7839f8be/volumes"
Oct 01 14:34:21 crc kubenswrapper[4605]: I1001 14:34:21.630825 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 14:34:21 crc kubenswrapper[4605]: I1001 14:34:21.631289 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:34:21 crc kubenswrapper[4605]: I1001 14:34:21.631339 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7"
Oct 01 14:34:21 crc kubenswrapper[4605]: I1001 14:34:21.632076 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 14:34:21 crc kubenswrapper[4605]: I1001 14:34:21.632142 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" gracePeriod=600
Oct 01 14:34:21 crc kubenswrapper[4605]: E1001 14:34:21.751766 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:34:22 crc kubenswrapper[4605]: I1001 14:34:22.603566 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" exitCode=0
Oct 01 14:34:22 crc kubenswrapper[4605]: I1001 14:34:22.603629 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"}
Oct 01 14:34:22 crc kubenswrapper[4605]: I1001 14:34:22.603676 4605 scope.go:117] "RemoveContainer" containerID="2b12842fc21dc955a6e9c6b86d51ef221646a4c444d7a262b1bedd2def659168"
Oct 01 14:34:22 crc kubenswrapper[4605]: I1001 14:34:22.604465 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:34:22 crc kubenswrapper[4605]: E1001 14:34:22.604855 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:34:34 crc kubenswrapper[4605]: I1001 14:34:34.926872 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:34:34 crc kubenswrapper[4605]: E1001 14:34:34.927580 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:34:47 crc kubenswrapper[4605]: I1001 14:34:47.937529 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:34:47 crc kubenswrapper[4605]: E1001 14:34:47.938340 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:34:58 crc kubenswrapper[4605]: I1001 14:34:58.926981 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:34:58 crc kubenswrapper[4605]: E1001 14:34:58.927715 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:35:09 crc kubenswrapper[4605]: I1001 14:35:09.931313 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:35:09 crc kubenswrapper[4605]: E1001 14:35:09.933426 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:35:24 crc kubenswrapper[4605]: I1001 14:35:24.928030 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:35:24 crc kubenswrapper[4605]: E1001 14:35:24.929418 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:35:37 crc kubenswrapper[4605]: I1001 14:35:37.935193 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:35:37 crc kubenswrapper[4605]: E1001 14:35:37.936219 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:35:48 crc kubenswrapper[4605]: I1001 14:35:48.927277 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:35:48 crc kubenswrapper[4605]: E1001 14:35:48.928644 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:36:02 crc kubenswrapper[4605]: I1001 14:36:02.927083 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:36:02 crc kubenswrapper[4605]: E1001 14:36:02.927863 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:36:16 crc kubenswrapper[4605]: I1001 14:36:16.927236 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:36:16 crc kubenswrapper[4605]: E1001 14:36:16.927950 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:36:31 crc kubenswrapper[4605]: I1001 14:36:31.927606 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:36:31 crc kubenswrapper[4605]: E1001 14:36:31.928450 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:36:34 crc kubenswrapper[4605]: I1001 14:36:34.831884 4605 generic.go:334] "Generic (PLEG): container finished" podID="2de51de5-4325-49f8-9179-f18e4de5fd46" containerID="f4f06a811e54fd683e04effb7ea99e6e5cf2f7d720d666a1778ad4a875b25bf4" exitCode=0
Oct 01 14:36:34 crc kubenswrapper[4605]: I1001 14:36:34.831972 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2de51de5-4325-49f8-9179-f18e4de5fd46","Type":"ContainerDied","Data":"f4f06a811e54fd683e04effb7ea99e6e5cf2f7d720d666a1778ad4a875b25bf4"}
event={"ID":"2de51de5-4325-49f8-9179-f18e4de5fd46","Type":"ContainerDied","Data":"f4f06a811e54fd683e04effb7ea99e6e5cf2f7d720d666a1778ad4a875b25bf4"} Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.214226 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.235845 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ca-certs\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236237 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt87d\" (UniqueName: \"kubernetes.io/projected/2de51de5-4325-49f8-9179-f18e4de5fd46-kube-api-access-bt87d\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236279 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-workdir\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236365 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config-secret\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236485 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236516 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-temporary\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236551 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ssh-key\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236588 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: \"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.236647 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-config-data\") pod \"2de51de5-4325-49f8-9179-f18e4de5fd46\" (UID: 
\"2de51de5-4325-49f8-9179-f18e4de5fd46\") " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.237942 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-config-data" (OuterVolumeSpecName: "config-data") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.238580 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.244545 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.254294 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "test-operator-logs") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.265144 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2de51de5-4325-49f8-9179-f18e4de5fd46-kube-api-access-bt87d" (OuterVolumeSpecName: "kube-api-access-bt87d") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "kube-api-access-bt87d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.274354 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.286231 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.290206 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.307133 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2de51de5-4325-49f8-9179-f18e4de5fd46" (UID: "2de51de5-4325-49f8-9179-f18e4de5fd46"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.339874 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.339933 4605 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.339958 4605 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.340015 4605 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.340034 4605 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2de51de5-4325-49f8-9179-f18e4de5fd46-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.340051 4605 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.340068 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt87d\" (UniqueName: \"kubernetes.io/projected/2de51de5-4325-49f8-9179-f18e4de5fd46-kube-api-access-bt87d\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.340085 4605 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2de51de5-4325-49f8-9179-f18e4de5fd46-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.340200 4605 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2de51de5-4325-49f8-9179-f18e4de5fd46-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.365003 4605 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: 
"kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.442194 4605 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.857810 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2de51de5-4325-49f8-9179-f18e4de5fd46","Type":"ContainerDied","Data":"d6657e44c1d5f2cd9f61667290c34f092435b1198c3881d6e541540fd3eb1f27"} Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.857855 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6657e44c1d5f2cd9f61667290c34f092435b1198c3881d6e541540fd3eb1f27" Oct 01 14:36:36 crc kubenswrapper[4605]: I1001 14:36:36.857942 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 01 14:36:42 crc kubenswrapper[4605]: I1001 14:36:42.927192 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:36:42 crc kubenswrapper[4605]: E1001 14:36:42.928273 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.694398 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 14:36:48.711590 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="registry-server" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711650 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="registry-server" Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 14:36:48.711693 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="extract-content" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711707 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="extract-content" Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 14:36:48.711789 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="extract-utilities" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711803 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="extract-utilities" Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 14:36:48.711821 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2de51de5-4325-49f8-9179-f18e4de5fd46" containerName="tempest-tests-tempest-tests-runner" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711834 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="2de51de5-4325-49f8-9179-f18e4de5fd46" containerName="tempest-tests-tempest-tests-runner" Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 
14:36:48.711858 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="registry-server" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711871 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="registry-server" Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 14:36:48.711891 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="extract-utilities" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711903 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="extract-utilities" Oct 01 14:36:48 crc kubenswrapper[4605]: E1001 14:36:48.711921 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="extract-content" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.711933 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="extract-content" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.712378 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3ab2920-c567-4058-8b11-eefe7839f8be" containerName="registry-server" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.712414 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8a60278-f4f7-4b95-90b4-99a70e1d6a2e" containerName="registry-server" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.712449 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="2de51de5-4325-49f8-9179-f18e4de5fd46" containerName="tempest-tests-tempest-tests-runner" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.713619 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.713810 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.717555 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-dz8dz" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.790762 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmmxv\" (UniqueName: \"kubernetes.io/projected/be5d33d0-2abd-424f-a518-f5eb5aa62661-kube-api-access-dmmxv\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.790824 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.892863 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmmxv\" (UniqueName: \"kubernetes.io/projected/be5d33d0-2abd-424f-a518-f5eb5aa62661-kube-api-access-dmmxv\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.892965 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.894237 4605 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.915718 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmmxv\" (UniqueName: \"kubernetes.io/projected/be5d33d0-2abd-424f-a518-f5eb5aa62661-kube-api-access-dmmxv\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:48 crc kubenswrapper[4605]: I1001 14:36:48.942739 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"be5d33d0-2abd-424f-a518-f5eb5aa62661\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:49 crc kubenswrapper[4605]: I1001 14:36:49.045961 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 01 14:36:49 crc kubenswrapper[4605]: I1001 14:36:49.503930 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 01 14:36:49 crc kubenswrapper[4605]: I1001 14:36:49.514903 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:36:50 crc kubenswrapper[4605]: I1001 14:36:50.006023 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"be5d33d0-2abd-424f-a518-f5eb5aa62661","Type":"ContainerStarted","Data":"fc1d6896f26c163a482c37c5f32e2c87767bb43307ee49f589154e8d42959130"} Oct 01 14:36:51 crc kubenswrapper[4605]: I1001 14:36:51.014654 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"be5d33d0-2abd-424f-a518-f5eb5aa62661","Type":"ContainerStarted","Data":"532620317a0ffd4f7882d86bd82521f85ff5ea4e254089da124b81d262f7f99b"} Oct 01 14:36:51 crc kubenswrapper[4605]: I1001 14:36:51.033913 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.138741515 podStartE2EDuration="3.033892036s" podCreationTimestamp="2025-10-01 14:36:48 +0000 UTC" firstStartedPulling="2025-10-01 14:36:49.51450813 +0000 UTC m=+3132.258484348" lastFinishedPulling="2025-10-01 14:36:50.409658671 +0000 UTC m=+3133.153634869" observedRunningTime="2025-10-01 14:36:51.025390922 +0000 UTC m=+3133.769367130" watchObservedRunningTime="2025-10-01 14:36:51.033892036 +0000 UTC m=+3133.777868254" Oct 01 14:36:55 crc kubenswrapper[4605]: I1001 14:36:55.926880 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:36:55 crc kubenswrapper[4605]: E1001 14:36:55.928498 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.089111 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xb4pj/must-gather-wrnp4"] Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.091601 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.103449 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xb4pj"/"kube-root-ca.crt" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.103749 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xb4pj"/"openshift-service-ca.crt" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.113403 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xb4pj/must-gather-wrnp4"] Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.282385 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd6lr\" (UniqueName: \"kubernetes.io/projected/3487f361-3e39-465e-9993-d6829b809a5d-kube-api-access-nd6lr\") pod \"must-gather-wrnp4\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") " pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.283105 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3487f361-3e39-465e-9993-d6829b809a5d-must-gather-output\") pod \"must-gather-wrnp4\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") " pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.384025 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3487f361-3e39-465e-9993-d6829b809a5d-must-gather-output\") pod \"must-gather-wrnp4\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") " pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.384326 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd6lr\" (UniqueName: \"kubernetes.io/projected/3487f361-3e39-465e-9993-d6829b809a5d-kube-api-access-nd6lr\") pod \"must-gather-wrnp4\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") " pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.384604 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3487f361-3e39-465e-9993-d6829b809a5d-must-gather-output\") pod \"must-gather-wrnp4\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") " pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.418789 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd6lr\" (UniqueName: \"kubernetes.io/projected/3487f361-3e39-465e-9993-d6829b809a5d-kube-api-access-nd6lr\") pod \"must-gather-wrnp4\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") " pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:08 crc kubenswrapper[4605]: I1001 14:37:08.425482 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" Oct 01 14:37:09 crc kubenswrapper[4605]: I1001 14:37:09.048896 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xb4pj/must-gather-wrnp4"] Oct 01 14:37:09 crc kubenswrapper[4605]: I1001 14:37:09.209722 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" event={"ID":"3487f361-3e39-465e-9993-d6829b809a5d","Type":"ContainerStarted","Data":"094c4a60f8353dc84d986354bc56cdd1466da9c206376ed9ec1fd0e0c262dbae"} Oct 01 14:37:10 crc kubenswrapper[4605]: I1001 14:37:10.927029 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:37:10 crc kubenswrapper[4605]: E1001 14:37:10.927568 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:37:18 crc kubenswrapper[4605]: I1001 14:37:18.308193 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" event={"ID":"3487f361-3e39-465e-9993-d6829b809a5d","Type":"ContainerStarted","Data":"23dd8fdc84b7177126e8a641c838288c9e175aa04ddf5972a1a16124b1372c56"} Oct 01 14:37:18 crc kubenswrapper[4605]: I1001 14:37:18.308892 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" event={"ID":"3487f361-3e39-465e-9993-d6829b809a5d","Type":"ContainerStarted","Data":"e24f115d16ddff72497b4c0f2d5dafb99d16a5147464408d71e4035235a5d774"} Oct 01 14:37:18 crc kubenswrapper[4605]: I1001 14:37:18.329789 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" podStartSLOduration=2.192359348 podStartE2EDuration="10.329775203s" podCreationTimestamp="2025-10-01 14:37:08 +0000 UTC" firstStartedPulling="2025-10-01 14:37:09.072342679 +0000 UTC m=+3151.816318877" lastFinishedPulling="2025-10-01 14:37:17.209758524 +0000 UTC m=+3159.953734732" observedRunningTime="2025-10-01 14:37:18.324655914 +0000 UTC m=+3161.068632122" watchObservedRunningTime="2025-10-01 14:37:18.329775203 +0000 UTC m=+3161.073751411" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.646899 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-5579g"] Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.649486 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.651610 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xb4pj"/"default-dockercfg-bt4sx" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.758052 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbstk\" (UniqueName: \"kubernetes.io/projected/6825a418-6b69-4ba0-8067-d512e8e158e7-kube-api-access-cbstk\") pod \"crc-debug-5579g\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.758123 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6825a418-6b69-4ba0-8067-d512e8e158e7-host\") pod \"crc-debug-5579g\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.859617 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbstk\" (UniqueName: \"kubernetes.io/projected/6825a418-6b69-4ba0-8067-d512e8e158e7-kube-api-access-cbstk\") pod \"crc-debug-5579g\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.859658 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6825a418-6b69-4ba0-8067-d512e8e158e7-host\") pod \"crc-debug-5579g\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.859887 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6825a418-6b69-4ba0-8067-d512e8e158e7-host\") pod \"crc-debug-5579g\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.878845 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbstk\" (UniqueName: \"kubernetes.io/projected/6825a418-6b69-4ba0-8067-d512e8e158e7-kube-api-access-cbstk\") pod \"crc-debug-5579g\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.927175 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:37:21 crc kubenswrapper[4605]: E1001 14:37:21.927403 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:37:21 crc kubenswrapper[4605]: I1001 14:37:21.966320 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:37:22 crc kubenswrapper[4605]: I1001 14:37:22.349279 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-5579g" event={"ID":"6825a418-6b69-4ba0-8067-d512e8e158e7","Type":"ContainerStarted","Data":"169e635753a54839071a569025c089be63e645f51bd424ba963165e583cf91be"} Oct 01 14:37:34 crc kubenswrapper[4605]: I1001 14:37:34.455175 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-5579g" event={"ID":"6825a418-6b69-4ba0-8067-d512e8e158e7","Type":"ContainerStarted","Data":"e2a56dad9214cb32a5bff0db2ddad77046a12c206510641b4f084eca4017efe6"} Oct 01 14:37:34 crc kubenswrapper[4605]: I1001 14:37:34.474850 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xb4pj/crc-debug-5579g" podStartSLOduration=1.960701582 podStartE2EDuration="13.474795616s" podCreationTimestamp="2025-10-01 14:37:21 +0000 UTC" firstStartedPulling="2025-10-01 14:37:22.008854336 +0000 UTC m=+3164.752830534" lastFinishedPulling="2025-10-01 14:37:33.52294836 +0000 UTC m=+3176.266924568" observedRunningTime="2025-10-01 14:37:34.468236371 +0000 UTC m=+3177.212212579" watchObservedRunningTime="2025-10-01 14:37:34.474795616 +0000 UTC m=+3177.218771834" Oct 01 14:37:34 crc kubenswrapper[4605]: I1001 14:37:34.926425 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:37:34 crc kubenswrapper[4605]: E1001 14:37:34.927136 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:37:48 crc kubenswrapper[4605]: I1001 14:37:48.926994 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:37:48 crc kubenswrapper[4605]: E1001 14:37:48.927694 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:38:03 crc kubenswrapper[4605]: I1001 14:38:03.928604 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:38:03 crc kubenswrapper[4605]: E1001 14:38:03.930353 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:38:18 crc kubenswrapper[4605]: I1001 14:38:18.927611 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 
14:38:18 crc kubenswrapper[4605]: E1001 14:38:18.928632 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:38:33 crc kubenswrapper[4605]: I1001 14:38:33.930452 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:38:33 crc kubenswrapper[4605]: E1001 14:38:33.931302 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:38:37 crc kubenswrapper[4605]: I1001 14:38:37.795850 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-59b677b5cd-92trn_b92ac133-ded4-4276-a43a-7d9414d051ab/barbican-api/0.log" Oct 01 14:38:37 crc kubenswrapper[4605]: I1001 14:38:37.876181 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-59b677b5cd-92trn_b92ac133-ded4-4276-a43a-7d9414d051ab/barbican-api-log/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.088334 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-66797cbc6-zrzb7_991f1482-e0da-43ae-89c5-bb9a5beaee2f/barbican-keystone-listener/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.312971 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-66797cbc6-zrzb7_991f1482-e0da-43ae-89c5-bb9a5beaee2f/barbican-keystone-listener-log/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.321511 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-798f8d7567-pfjfn_6f8f7068-62be-4ee7-9f6a-63812a2f5413/barbican-worker/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.479472 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-798f8d7567-pfjfn_6f8f7068-62be-4ee7-9f6a-63812a2f5413/barbican-worker-log/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.548764 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2_b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.751512 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/ceilometer-central-agent/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.838336 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/ceilometer-notification-agent/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 14:38:38.962578 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/proxy-httpd/0.log" Oct 01 14:38:38 crc kubenswrapper[4605]: I1001 
14:38:38.964057 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/sg-core/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.149504 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6a6bf8ea-cc99-43f4-913c-59039f5e11db/cinder-api/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.195425 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6a6bf8ea-cc99-43f4-913c-59039f5e11db/cinder-api-log/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.380833 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_675a2ec1-82ad-4b20-a077-d8d427108ce7/cinder-scheduler/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.425929 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_675a2ec1-82ad-4b20-a077-d8d427108ce7/probe/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.673211 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-9pc94_31cfc16a-0d93-4cc4-9281-e4cee9664772/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.826043 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt_84e4e275-5c28-4ea8-bf23-154b3aaa036d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:39 crc kubenswrapper[4605]: I1001 14:38:39.899045 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd_283ccb8d-6321-440f-a0a6-f2118a4f9bf5/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:40 crc kubenswrapper[4605]: I1001 14:38:40.087670 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bcf8b9d95-kpkj4_cfe22295-abd7-4094-b93e-3fb24d38242c/init/0.log" Oct 01 14:38:40 crc kubenswrapper[4605]: I1001 14:38:40.347588 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bcf8b9d95-kpkj4_cfe22295-abd7-4094-b93e-3fb24d38242c/init/0.log" Oct 01 14:38:40 crc kubenswrapper[4605]: I1001 14:38:40.460761 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bcf8b9d95-kpkj4_cfe22295-abd7-4094-b93e-3fb24d38242c/dnsmasq-dns/0.log" Oct 01 14:38:40 crc kubenswrapper[4605]: I1001 14:38:40.636099 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-8c89v_88958218-6061-4e38-b6fd-88b9502ebf30/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:40 crc kubenswrapper[4605]: I1001 14:38:40.756532 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_eac8da6a-ca40-4b05-b525-d645a20f3592/glance-httpd/0.log" Oct 01 14:38:40 crc kubenswrapper[4605]: I1001 14:38:40.878447 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_eac8da6a-ca40-4b05-b525-d645a20f3592/glance-log/0.log" Oct 01 14:38:41 crc kubenswrapper[4605]: I1001 14:38:41.009675 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6/glance-httpd/0.log" Oct 01 14:38:41 crc kubenswrapper[4605]: I1001 14:38:41.178722 4605 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6/glance-log/0.log" Oct 01 14:38:41 crc kubenswrapper[4605]: I1001 14:38:41.393159 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64d6df575b-5ctbf_80fb1c51-bd86-4896-8dac-59747473f066/horizon/0.log" Oct 01 14:38:41 crc kubenswrapper[4605]: I1001 14:38:41.653297 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp_3187f065-743c-4531-93b0-12c666bdd4c3/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:41 crc kubenswrapper[4605]: I1001 14:38:41.658977 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64d6df575b-5ctbf_80fb1c51-bd86-4896-8dac-59747473f066/horizon-log/0.log" Oct 01 14:38:41 crc kubenswrapper[4605]: I1001 14:38:41.811167 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-95t2p_0a383a44-66c3-466b-977f-4297fa2f9718/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:42 crc kubenswrapper[4605]: I1001 14:38:42.062039 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea/kube-state-metrics/0.log" Oct 01 14:38:42 crc kubenswrapper[4605]: I1001 14:38:42.161281 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6964b49dc5-fgw45_471f90e3-9942-4516-ad5a-26cddd148bd4/keystone-api/0.log" Oct 01 14:38:42 crc kubenswrapper[4605]: I1001 14:38:42.340662 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z_9df7ad58-d542-4c8a-89fb-464689d1729c/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:42 crc kubenswrapper[4605]: I1001 14:38:42.682728 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-f85558977-q9rhp_67c3654e-3eed-4260-8864-3ab0334a32a0/neutron-api/0.log" Oct 01 14:38:42 crc kubenswrapper[4605]: I1001 14:38:42.770613 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-f85558977-q9rhp_67c3654e-3eed-4260-8864-3ab0334a32a0/neutron-httpd/0.log" Oct 01 14:38:42 crc kubenswrapper[4605]: I1001 14:38:42.997478 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n_338fec8e-40aa-4170-9f63-dd6ae6607d2d/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:43 crc kubenswrapper[4605]: I1001 14:38:43.319130 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_885f1ef5-027b-49b0-9c25-444a307d3075/nova-api-log/0.log" Oct 01 14:38:43 crc kubenswrapper[4605]: I1001 14:38:43.499577 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_885f1ef5-027b-49b0-9c25-444a307d3075/nova-api-api/0.log" Oct 01 14:38:43 crc kubenswrapper[4605]: I1001 14:38:43.638517 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c93e48d5-b3b1-4390-bb46-308151d80e4e/nova-cell0-conductor-conductor/0.log" Oct 01 14:38:43 crc kubenswrapper[4605]: I1001 14:38:43.870025 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2/nova-cell1-conductor-conductor/0.log" Oct 01 14:38:44 crc kubenswrapper[4605]: I1001 14:38:44.020360 4605 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_02fa0bb4-5a25-43da-8f92-a8c0ca715032/nova-cell1-novncproxy-novncproxy/0.log" Oct 01 14:38:44 crc kubenswrapper[4605]: I1001 14:38:44.403517 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-v8zqd_07ba7a39-5510-4075-b789-aa61ef2643f5/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:44 crc kubenswrapper[4605]: I1001 14:38:44.600786 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_05f7dd13-a8f8-4263-ad0f-87d5972c6eb0/nova-metadata-log/0.log" Oct 01 14:38:45 crc kubenswrapper[4605]: I1001 14:38:45.053485 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927/nova-scheduler-scheduler/0.log" Oct 01 14:38:45 crc kubenswrapper[4605]: I1001 14:38:45.326845 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c414b65e-0cce-4d58-aa5d-08d0679595cd/mysql-bootstrap/0.log" Oct 01 14:38:45 crc kubenswrapper[4605]: I1001 14:38:45.572510 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c414b65e-0cce-4d58-aa5d-08d0679595cd/mysql-bootstrap/0.log" Oct 01 14:38:45 crc kubenswrapper[4605]: I1001 14:38:45.600396 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c414b65e-0cce-4d58-aa5d-08d0679595cd/galera/0.log" Oct 01 14:38:45 crc kubenswrapper[4605]: I1001 14:38:45.758301 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_05f7dd13-a8f8-4263-ad0f-87d5972c6eb0/nova-metadata-metadata/0.log" Oct 01 14:38:45 crc kubenswrapper[4605]: I1001 14:38:45.889983 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e/mysql-bootstrap/0.log" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.122352 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e/mysql-bootstrap/0.log" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.229354 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e/galera/0.log" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.384169 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_58264b9f-ddeb-466d-94aa-536c1a381308/openstackclient/0.log" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.595252 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jvb44_a37367ae-0e7b-4ad1-afb4-c48ca6282706/ovn-controller/0.log" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.794973 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ppfrt_2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4/openstack-network-exporter/0.log" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.926411 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:38:46 crc kubenswrapper[4605]: E1001 14:38:46.926979 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:38:46 crc kubenswrapper[4605]: I1001 14:38:46.975575 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovsdb-server-init/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.132011 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovsdb-server-init/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.213082 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovsdb-server/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.226722 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovs-vswitchd/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.454764 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-x2z69_64e9c2e1-759a-4bb7-9fd4-56190af5f1b9/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.618376 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e2f8bf30-b59a-4564-b6d2-5f201b0fe957/openstack-network-exporter/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.681390 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e2f8bf30-b59a-4564-b6d2-5f201b0fe957/ovn-northd/0.log" Oct 01 14:38:47 crc kubenswrapper[4605]: I1001 14:38:47.966768 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_70acc9bd-54b2-4c70-bf3f-ce66a88bbd06/ovsdbserver-nb/0.log" Oct 01 14:38:48 crc kubenswrapper[4605]: I1001 14:38:48.057864 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_70acc9bd-54b2-4c70-bf3f-ce66a88bbd06/openstack-network-exporter/0.log" Oct 01 14:38:48 crc kubenswrapper[4605]: I1001 14:38:48.240942 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e3eaff33-3a5d-4868-ba47-a03e7ac13ab5/openstack-network-exporter/0.log" Oct 01 14:38:48 crc kubenswrapper[4605]: I1001 14:38:48.338945 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e3eaff33-3a5d-4868-ba47-a03e7ac13ab5/ovsdbserver-sb/0.log" Oct 01 14:38:48 crc kubenswrapper[4605]: I1001 14:38:48.632381 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cc6b8b7dd-f8khf_2524f72b-05b3-4299-90d6-4671b410d59a/placement-api/0.log" Oct 01 14:38:48 crc kubenswrapper[4605]: I1001 14:38:48.708592 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cc6b8b7dd-f8khf_2524f72b-05b3-4299-90d6-4671b410d59a/placement-log/0.log" Oct 01 14:38:48 crc kubenswrapper[4605]: I1001 14:38:48.890791 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9d1c480b-5fd5-4134-913c-19381d8f4db4/setup-container/0.log" Oct 01 14:38:49 crc kubenswrapper[4605]: I1001 14:38:49.228884 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9d1c480b-5fd5-4134-913c-19381d8f4db4/setup-container/0.log" Oct 01 14:38:49 crc kubenswrapper[4605]: I1001 14:38:49.347933 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9d1c480b-5fd5-4134-913c-19381d8f4db4/rabbitmq/0.log" Oct 01 14:38:49 crc kubenswrapper[4605]: I1001 14:38:49.744955 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1b035ab1-17f0-4d9e-91d4-983b4cd06469/setup-container/0.log" Oct 01 14:38:50 crc kubenswrapper[4605]: I1001 14:38:50.007230 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1b035ab1-17f0-4d9e-91d4-983b4cd06469/setup-container/0.log" Oct 01 14:38:50 crc kubenswrapper[4605]: I1001 14:38:50.049656 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1b035ab1-17f0-4d9e-91d4-983b4cd06469/rabbitmq/0.log" Oct 01 14:38:50 crc kubenswrapper[4605]: I1001 14:38:50.310105 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b_0f112a96-c395-4ae9-8960-596266eb98b0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:50 crc kubenswrapper[4605]: I1001 14:38:50.409086 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-tps9f_6e328f7a-3f9b-48c7-b277-cf0f99b9bf86/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:50 crc kubenswrapper[4605]: I1001 14:38:50.642164 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2_9cd5ee34-51d2-4a40-9312-e83bf07927b7/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.007288 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-5lfmw_0ce2e0e7-1cfa-4fcb-87d3-214503a56fff/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.182187 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-ql62n_375a2afa-b804-4227-8137-eb7c5c56d8fb/ssh-known-hosts-edpm-deployment/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.370858 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-66b5967899-cv4c4_7ae58440-10a5-44a6-94e8-89d112c67651/proxy-httpd/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.412484 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-66b5967899-cv4c4_7ae58440-10a5-44a6-94e8-89d112c67651/proxy-server/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.707111 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-x8zcm_4c3df3b9-829b-4ebb-9593-487b1f6ddce1/swift-ring-rebalance/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.873327 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-auditor/0.log" Oct 01 14:38:51 crc kubenswrapper[4605]: I1001 14:38:51.941818 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-reaper/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.060071 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-replicator/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.107166 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-server/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.228762 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-auditor/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.312542 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-replicator/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.359158 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-server/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.492674 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-updater/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.576865 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-auditor/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.625496 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-expirer/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.784464 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-replicator/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.859654 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-server/0.log" Oct 01 14:38:52 crc kubenswrapper[4605]: I1001 14:38:52.929288 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-updater/0.log" Oct 01 14:38:53 crc kubenswrapper[4605]: I1001 14:38:53.067785 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/rsync/0.log" Oct 01 14:38:53 crc kubenswrapper[4605]: I1001 14:38:53.112911 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/swift-recon-cron/0.log" Oct 01 14:38:53 crc kubenswrapper[4605]: I1001 14:38:53.387907 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l_58600359-0fa8-4801-a1d3-87598ba13651/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:53 crc kubenswrapper[4605]: I1001 14:38:53.567221 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2de51de5-4325-49f8-9179-f18e4de5fd46/tempest-tests-tempest-tests-runner/0.log" Oct 01 14:38:53 crc kubenswrapper[4605]: I1001 14:38:53.670588 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_be5d33d0-2abd-424f-a518-f5eb5aa62661/test-operator-logs-container/0.log" Oct 01 14:38:53 crc kubenswrapper[4605]: I1001 14:38:53.983902 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8_94e0445e-6b97-4b10-80a3-5d8827ce0120/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 01 14:38:57 crc kubenswrapper[4605]: I1001 14:38:57.937450 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:38:57 crc kubenswrapper[4605]: E1001 14:38:57.938351 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:38:59 crc kubenswrapper[4605]: I1001 14:38:59.354642 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_cfe08201-447e-4697-95cb-dfdf59dfdbe9/memcached/0.log" Oct 01 14:39:11 crc kubenswrapper[4605]: I1001 14:39:11.926657 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:39:11 crc kubenswrapper[4605]: E1001 14:39:11.927419 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:39:22 crc kubenswrapper[4605]: I1001 14:39:22.927334 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97" Oct 01 14:39:23 crc kubenswrapper[4605]: I1001 14:39:23.412064 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"d8795f6535dab4a31d73cbcc308fbff0269900c90034691649da24daf04c865a"} Oct 01 14:39:35 crc kubenswrapper[4605]: I1001 14:39:35.524864 4605 generic.go:334] "Generic (PLEG): container finished" podID="6825a418-6b69-4ba0-8067-d512e8e158e7" containerID="e2a56dad9214cb32a5bff0db2ddad77046a12c206510641b4f084eca4017efe6" exitCode=0 Oct 01 14:39:35 crc kubenswrapper[4605]: I1001 14:39:35.524992 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-5579g" event={"ID":"6825a418-6b69-4ba0-8067-d512e8e158e7","Type":"ContainerDied","Data":"e2a56dad9214cb32a5bff0db2ddad77046a12c206510641b4f084eca4017efe6"} Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.682861 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.755574 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-5579g"] Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.766642 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-5579g"] Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.827196 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6825a418-6b69-4ba0-8067-d512e8e158e7-host\") pod \"6825a418-6b69-4ba0-8067-d512e8e158e7\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.827548 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbstk\" (UniqueName: \"kubernetes.io/projected/6825a418-6b69-4ba0-8067-d512e8e158e7-kube-api-access-cbstk\") pod \"6825a418-6b69-4ba0-8067-d512e8e158e7\" (UID: \"6825a418-6b69-4ba0-8067-d512e8e158e7\") " Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.827426 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6825a418-6b69-4ba0-8067-d512e8e158e7-host" (OuterVolumeSpecName: "host") pod "6825a418-6b69-4ba0-8067-d512e8e158e7" (UID: "6825a418-6b69-4ba0-8067-d512e8e158e7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.828351 4605 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6825a418-6b69-4ba0-8067-d512e8e158e7-host\") on node \"crc\" DevicePath \"\"" Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.835398 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6825a418-6b69-4ba0-8067-d512e8e158e7-kube-api-access-cbstk" (OuterVolumeSpecName: "kube-api-access-cbstk") pod "6825a418-6b69-4ba0-8067-d512e8e158e7" (UID: "6825a418-6b69-4ba0-8067-d512e8e158e7"). InnerVolumeSpecName "kube-api-access-cbstk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:39:36 crc kubenswrapper[4605]: I1001 14:39:36.929827 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbstk\" (UniqueName: \"kubernetes.io/projected/6825a418-6b69-4ba0-8067-d512e8e158e7-kube-api-access-cbstk\") on node \"crc\" DevicePath \"\"" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.555309 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="169e635753a54839071a569025c089be63e645f51bd424ba963165e583cf91be" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.555511 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-5579g" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.943835 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6825a418-6b69-4ba0-8067-d512e8e158e7" path="/var/lib/kubelet/pods/6825a418-6b69-4ba0-8067-d512e8e158e7/volumes" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.944553 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-rz2m7"] Oct 01 14:39:37 crc kubenswrapper[4605]: E1001 14:39:37.944952 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6825a418-6b69-4ba0-8067-d512e8e158e7" containerName="container-00" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.944967 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="6825a418-6b69-4ba0-8067-d512e8e158e7" containerName="container-00" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.945202 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="6825a418-6b69-4ba0-8067-d512e8e158e7" containerName="container-00" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.945944 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:37 crc kubenswrapper[4605]: I1001 14:39:37.948542 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xb4pj"/"default-dockercfg-bt4sx" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.050274 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/865bacd4-85a6-43dd-98f5-cbeed9c634ad-host\") pod \"crc-debug-rz2m7\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.050396 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lffl4\" (UniqueName: \"kubernetes.io/projected/865bacd4-85a6-43dd-98f5-cbeed9c634ad-kube-api-access-lffl4\") pod \"crc-debug-rz2m7\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.152001 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/865bacd4-85a6-43dd-98f5-cbeed9c634ad-host\") pod \"crc-debug-rz2m7\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.152067 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lffl4\" (UniqueName: \"kubernetes.io/projected/865bacd4-85a6-43dd-98f5-cbeed9c634ad-kube-api-access-lffl4\") pod \"crc-debug-rz2m7\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.152382 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/865bacd4-85a6-43dd-98f5-cbeed9c634ad-host\") pod \"crc-debug-rz2m7\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.171079 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lffl4\" (UniqueName: 
\"kubernetes.io/projected/865bacd4-85a6-43dd-98f5-cbeed9c634ad-kube-api-access-lffl4\") pod \"crc-debug-rz2m7\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.273300 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:38 crc kubenswrapper[4605]: I1001 14:39:38.578004 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" event={"ID":"865bacd4-85a6-43dd-98f5-cbeed9c634ad","Type":"ContainerStarted","Data":"49a97bb97812e3e994233abd71388d1dbd6ec8615269de25f7bd61e98b2e6573"} Oct 01 14:39:39 crc kubenswrapper[4605]: I1001 14:39:39.608982 4605 generic.go:334] "Generic (PLEG): container finished" podID="865bacd4-85a6-43dd-98f5-cbeed9c634ad" containerID="5b95fcac86feb6959cd5dc4750bc7a500dc0438b98356893f5bf9f4ee54d15cc" exitCode=0 Oct 01 14:39:39 crc kubenswrapper[4605]: I1001 14:39:39.609036 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" event={"ID":"865bacd4-85a6-43dd-98f5-cbeed9c634ad","Type":"ContainerDied","Data":"5b95fcac86feb6959cd5dc4750bc7a500dc0438b98356893f5bf9f4ee54d15cc"} Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.747647 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.812062 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lffl4\" (UniqueName: \"kubernetes.io/projected/865bacd4-85a6-43dd-98f5-cbeed9c634ad-kube-api-access-lffl4\") pod \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.812266 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/865bacd4-85a6-43dd-98f5-cbeed9c634ad-host\") pod \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\" (UID: \"865bacd4-85a6-43dd-98f5-cbeed9c634ad\") " Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.812387 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/865bacd4-85a6-43dd-98f5-cbeed9c634ad-host" (OuterVolumeSpecName: "host") pod "865bacd4-85a6-43dd-98f5-cbeed9c634ad" (UID: "865bacd4-85a6-43dd-98f5-cbeed9c634ad"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.812700 4605 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/865bacd4-85a6-43dd-98f5-cbeed9c634ad-host\") on node \"crc\" DevicePath \"\"" Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.817617 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/865bacd4-85a6-43dd-98f5-cbeed9c634ad-kube-api-access-lffl4" (OuterVolumeSpecName: "kube-api-access-lffl4") pod "865bacd4-85a6-43dd-98f5-cbeed9c634ad" (UID: "865bacd4-85a6-43dd-98f5-cbeed9c634ad"). InnerVolumeSpecName "kube-api-access-lffl4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:39:40 crc kubenswrapper[4605]: I1001 14:39:40.914512 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lffl4\" (UniqueName: \"kubernetes.io/projected/865bacd4-85a6-43dd-98f5-cbeed9c634ad-kube-api-access-lffl4\") on node \"crc\" DevicePath \"\"" Oct 01 14:39:41 crc kubenswrapper[4605]: I1001 14:39:41.628524 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" event={"ID":"865bacd4-85a6-43dd-98f5-cbeed9c634ad","Type":"ContainerDied","Data":"49a97bb97812e3e994233abd71388d1dbd6ec8615269de25f7bd61e98b2e6573"} Oct 01 14:39:41 crc kubenswrapper[4605]: I1001 14:39:41.628569 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49a97bb97812e3e994233abd71388d1dbd6ec8615269de25f7bd61e98b2e6573" Oct 01 14:39:41 crc kubenswrapper[4605]: I1001 14:39:41.628615 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-rz2m7" Oct 01 14:39:45 crc kubenswrapper[4605]: I1001 14:39:45.360791 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-rz2m7"] Oct 01 14:39:45 crc kubenswrapper[4605]: I1001 14:39:45.368220 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-rz2m7"] Oct 01 14:39:45 crc kubenswrapper[4605]: I1001 14:39:45.937709 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="865bacd4-85a6-43dd-98f5-cbeed9c634ad" path="/var/lib/kubelet/pods/865bacd4-85a6-43dd-98f5-cbeed9c634ad/volumes" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.533840 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-s2gp8"] Oct 01 14:39:46 crc kubenswrapper[4605]: E1001 14:39:46.535240 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865bacd4-85a6-43dd-98f5-cbeed9c634ad" containerName="container-00" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.535375 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="865bacd4-85a6-43dd-98f5-cbeed9c634ad" containerName="container-00" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.535706 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="865bacd4-85a6-43dd-98f5-cbeed9c634ad" containerName="container-00" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.536543 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.544638 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xb4pj"/"default-dockercfg-bt4sx" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.639785 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69b5p\" (UniqueName: \"kubernetes.io/projected/52290aa3-eaa8-43dd-a43a-a136b36b5888-kube-api-access-69b5p\") pod \"crc-debug-s2gp8\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.639913 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/52290aa3-eaa8-43dd-a43a-a136b36b5888-host\") pod \"crc-debug-s2gp8\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.742013 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69b5p\" (UniqueName: \"kubernetes.io/projected/52290aa3-eaa8-43dd-a43a-a136b36b5888-kube-api-access-69b5p\") pod \"crc-debug-s2gp8\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.742162 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/52290aa3-eaa8-43dd-a43a-a136b36b5888-host\") pod \"crc-debug-s2gp8\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.742260 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/52290aa3-eaa8-43dd-a43a-a136b36b5888-host\") pod \"crc-debug-s2gp8\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.779781 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69b5p\" (UniqueName: \"kubernetes.io/projected/52290aa3-eaa8-43dd-a43a-a136b36b5888-kube-api-access-69b5p\") pod \"crc-debug-s2gp8\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:46 crc kubenswrapper[4605]: I1001 14:39:46.862505 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:47 crc kubenswrapper[4605]: I1001 14:39:47.680248 4605 generic.go:334] "Generic (PLEG): container finished" podID="52290aa3-eaa8-43dd-a43a-a136b36b5888" containerID="d0ec6df4056cfbe792779b26f3914e04dd2f4b46a54c4fa945c75bf9cebb7f32" exitCode=0 Oct 01 14:39:47 crc kubenswrapper[4605]: I1001 14:39:47.680379 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" event={"ID":"52290aa3-eaa8-43dd-a43a-a136b36b5888","Type":"ContainerDied","Data":"d0ec6df4056cfbe792779b26f3914e04dd2f4b46a54c4fa945c75bf9cebb7f32"} Oct 01 14:39:47 crc kubenswrapper[4605]: I1001 14:39:47.680600 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" event={"ID":"52290aa3-eaa8-43dd-a43a-a136b36b5888","Type":"ContainerStarted","Data":"bbb6918065ee0761801fded71988728ee4e443340f5c1e5bd5b7b4b24dbd68c0"} Oct 01 14:39:47 crc kubenswrapper[4605]: I1001 14:39:47.723203 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-s2gp8"] Oct 01 14:39:47 crc kubenswrapper[4605]: I1001 14:39:47.734235 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xb4pj/crc-debug-s2gp8"] Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.794277 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.879266 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69b5p\" (UniqueName: \"kubernetes.io/projected/52290aa3-eaa8-43dd-a43a-a136b36b5888-kube-api-access-69b5p\") pod \"52290aa3-eaa8-43dd-a43a-a136b36b5888\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.879430 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/52290aa3-eaa8-43dd-a43a-a136b36b5888-host\") pod \"52290aa3-eaa8-43dd-a43a-a136b36b5888\" (UID: \"52290aa3-eaa8-43dd-a43a-a136b36b5888\") " Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.879708 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52290aa3-eaa8-43dd-a43a-a136b36b5888-host" (OuterVolumeSpecName: "host") pod "52290aa3-eaa8-43dd-a43a-a136b36b5888" (UID: "52290aa3-eaa8-43dd-a43a-a136b36b5888"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.880024 4605 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/52290aa3-eaa8-43dd-a43a-a136b36b5888-host\") on node \"crc\" DevicePath \"\"" Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.885028 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52290aa3-eaa8-43dd-a43a-a136b36b5888-kube-api-access-69b5p" (OuterVolumeSpecName: "kube-api-access-69b5p") pod "52290aa3-eaa8-43dd-a43a-a136b36b5888" (UID: "52290aa3-eaa8-43dd-a43a-a136b36b5888"). InnerVolumeSpecName "kube-api-access-69b5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:39:48 crc kubenswrapper[4605]: I1001 14:39:48.981882 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69b5p\" (UniqueName: \"kubernetes.io/projected/52290aa3-eaa8-43dd-a43a-a136b36b5888-kube-api-access-69b5p\") on node \"crc\" DevicePath \"\"" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.585187 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/util/0.log" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.701916 4605 scope.go:117] "RemoveContainer" containerID="d0ec6df4056cfbe792779b26f3914e04dd2f4b46a54c4fa945c75bf9cebb7f32" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.701940 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/crc-debug-s2gp8" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.776107 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/pull/0.log" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.826203 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/util/0.log" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.860406 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/pull/0.log" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.938699 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52290aa3-eaa8-43dd-a43a-a136b36b5888" path="/var/lib/kubelet/pods/52290aa3-eaa8-43dd-a43a-a136b36b5888/volumes" Oct 01 14:39:49 crc kubenswrapper[4605]: I1001 14:39:49.987211 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/util/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.014510 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/extract/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.034794 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/pull/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.247303 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-td7cl_1c52463d-7f43-422b-b6f2-071553e4efb1/kube-rbac-proxy/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.274021 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-td7cl_1c52463d-7f43-422b-b6f2-071553e4efb1/manager/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.308621 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-l6rjg_ddfed60b-8b0b-4481-b9f7-f906dd6413f8/kube-rbac-proxy/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.496486 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-k4j9s_2f85ca51-dac6-464b-8da5-b2b35511c3a7/manager/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.559111 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-k4j9s_2f85ca51-dac6-464b-8da5-b2b35511c3a7/kube-rbac-proxy/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.689042 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-r2t7s_685ab06d-d56b-429c-b196-3f2576a63ad5/kube-rbac-proxy/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.709920 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-l6rjg_ddfed60b-8b0b-4481-b9f7-f906dd6413f8/manager/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.876816 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-r2t7s_685ab06d-d56b-429c-b196-3f2576a63ad5/manager/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.942915 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-bc7cq_6728814d-8d86-4255-8e33-c2205cc3421b/kube-rbac-proxy/0.log" Oct 01 14:39:50 crc kubenswrapper[4605]: I1001 14:39:50.949477 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-bc7cq_6728814d-8d86-4255-8e33-c2205cc3421b/manager/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.131814 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-4gzrq_114ac89a-6b52-4e58-8ec7-1a5ebe953e46/kube-rbac-proxy/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.186253 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-4gzrq_114ac89a-6b52-4e58-8ec7-1a5ebe953e46/manager/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.514006 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-p5vqs_769cd151-8943-4faa-876c-e91d749ef107/kube-rbac-proxy/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.631729 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-p5vqs_769cd151-8943-4faa-876c-e91d749ef107/manager/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.669502 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5cd4858477-sb7tm_a3fbdb59-b188-4842-af73-d3c68afd58ff/kube-rbac-proxy/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.787187 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5cd4858477-sb7tm_a3fbdb59-b188-4842-af73-d3c68afd58ff/manager/0.log" Oct 01 14:39:51 crc kubenswrapper[4605]: I1001 14:39:51.899480 4605 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-665ff6bffd-fdn7m_416364d4-8fac-4979-b4f1-e1f009f0b8cd/kube-rbac-proxy/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.012331 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-665ff6bffd-fdn7m_416364d4-8fac-4979-b4f1-e1f009f0b8cd/manager/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.153688 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-kxwrs_e578b0b7-de64-4492-9ab3-b8b73ebd0909/manager/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.185895 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-kxwrs_e578b0b7-de64-4492-9ab3-b8b73ebd0909/kube-rbac-proxy/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.266500 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-rscdq_5db7ac6b-c1e1-4640-943f-9db9a460e625/kube-rbac-proxy/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.397032 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-rscdq_5db7ac6b-c1e1-4640-943f-9db9a460e625/manager/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.593999 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-849d5b9b84-b2fzm_6ee4d18f-3f02-49c4-943c-534e47601be5/manager/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.612875 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-849d5b9b84-b2fzm_6ee4d18f-3f02-49c4-943c-534e47601be5/kube-rbac-proxy/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.832243 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-64cd67b5cb-xrh9t_21d80fce-11c1-4ca0-8687-dc2bb6ced356/kube-rbac-proxy/0.log" Oct 01 14:39:52 crc kubenswrapper[4605]: I1001 14:39:52.949386 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-64cd67b5cb-xrh9t_21d80fce-11c1-4ca0-8687-dc2bb6ced356/manager/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.043207 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b787867f4-cx7q7_f105c6d3-5a2b-442c-ad1c-bcffd3fd869b/kube-rbac-proxy/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.057344 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b787867f4-cx7q7_f105c6d3-5a2b-442c-ad1c-bcffd3fd869b/manager/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.424670 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb_4c039eff-8d65-45d5-9c1a-9fddca3c5e57/manager/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.442791 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb_4c039eff-8d65-45d5-9c1a-9fddca3c5e57/kube-rbac-proxy/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 
14:39:53.502960 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6565f9cdf-zgq4z_7ba2ae30-a4df-43a7-b6bc-89814bd65ab7/kube-rbac-proxy/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.777239 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b5f977c9c-ztrlb_471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a/kube-rbac-proxy/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.975524 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b5f977c9c-ztrlb_471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a/operator/0.log" Oct 01 14:39:53 crc kubenswrapper[4605]: I1001 14:39:53.988230 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8vx9r_c2be49ad-0f11-4479-b725-29854a0c1b8f/registry-server/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.256770 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-8sgk8_d861e141-379a-4d47-bca7-bff86972afaa/kube-rbac-proxy/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.289855 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-8sgk8_d861e141-379a-4d47-bca7-bff86972afaa/manager/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.442133 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-tf7ln_97fdc8f2-0472-4957-a59a-fd9474c0d15c/kube-rbac-proxy/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.585358 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-tf7ln_97fdc8f2-0472-4957-a59a-fd9474c0d15c/manager/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.632041 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9_0de7ac6f-ac16-4f83-8e06-10c9b2500491/operator/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.722263 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6565f9cdf-zgq4z_7ba2ae30-a4df-43a7-b6bc-89814bd65ab7/manager/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.838206 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-84d6b4b759-xdntp_34738360-dd91-4a55-b6d2-ab69d1bb5db4/kube-rbac-proxy/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.906002 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x8fvl_8dcbd1dd-75c6-40ff-a9ea-267f9be92433/kube-rbac-proxy/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.926919 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-84d6b4b759-xdntp_34738360-dd91-4a55-b6d2-ab69d1bb5db4/manager/0.log" Oct 01 14:39:54 crc kubenswrapper[4605]: I1001 14:39:54.961867 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x8fvl_8dcbd1dd-75c6-40ff-a9ea-267f9be92433/manager/0.log" Oct 01 14:39:55 crc kubenswrapper[4605]: 
I1001 14:39:55.091813 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-85777745bb-8rcr7_6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1/manager/0.log" Oct 01 14:39:55 crc kubenswrapper[4605]: I1001 14:39:55.135406 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-85777745bb-8rcr7_6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1/kube-rbac-proxy/0.log" Oct 01 14:39:55 crc kubenswrapper[4605]: I1001 14:39:55.212280 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b9957f54f-ct9tb_baf8d82f-41e9-417a-9e88-4320b65d7c6c/kube-rbac-proxy/0.log" Oct 01 14:39:55 crc kubenswrapper[4605]: I1001 14:39:55.280707 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b9957f54f-ct9tb_baf8d82f-41e9-417a-9e88-4320b65d7c6c/manager/0.log" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.753147 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5lgfm"] Oct 01 14:39:56 crc kubenswrapper[4605]: E1001 14:39:56.753785 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52290aa3-eaa8-43dd-a43a-a136b36b5888" containerName="container-00" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.753797 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="52290aa3-eaa8-43dd-a43a-a136b36b5888" containerName="container-00" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.754001 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="52290aa3-eaa8-43dd-a43a-a136b36b5888" containerName="container-00" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.756112 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.772675 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5lgfm"] Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.822580 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzq4n\" (UniqueName: \"kubernetes.io/projected/2c60439f-07bf-4032-8fe8-f98044a402e9-kube-api-access-hzq4n\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.822654 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-catalog-content\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.822758 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-utilities\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.925239 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzq4n\" (UniqueName: \"kubernetes.io/projected/2c60439f-07bf-4032-8fe8-f98044a402e9-kube-api-access-hzq4n\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.925297 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-catalog-content\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.925343 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-utilities\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.925920 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-utilities\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.925962 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-catalog-content\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:56 crc kubenswrapper[4605]: I1001 14:39:56.946262 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-hzq4n\" (UniqueName: \"kubernetes.io/projected/2c60439f-07bf-4032-8fe8-f98044a402e9-kube-api-access-hzq4n\") pod \"redhat-marketplace-5lgfm\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:57 crc kubenswrapper[4605]: I1001 14:39:57.089552 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:39:58 crc kubenswrapper[4605]: I1001 14:39:58.276728 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5lgfm"] Oct 01 14:39:58 crc kubenswrapper[4605]: E1001 14:39:58.715301 4605 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c60439f_07bf_4032_8fe8_f98044a402e9.slice/crio-conmon-fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44.scope\": RecentStats: unable to find data in memory cache]" Oct 01 14:39:58 crc kubenswrapper[4605]: I1001 14:39:58.847162 4605 generic.go:334] "Generic (PLEG): container finished" podID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerID="fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44" exitCode=0 Oct 01 14:39:58 crc kubenswrapper[4605]: I1001 14:39:58.847200 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5lgfm" event={"ID":"2c60439f-07bf-4032-8fe8-f98044a402e9","Type":"ContainerDied","Data":"fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44"} Oct 01 14:39:58 crc kubenswrapper[4605]: I1001 14:39:58.847577 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5lgfm" event={"ID":"2c60439f-07bf-4032-8fe8-f98044a402e9","Type":"ContainerStarted","Data":"f053d34ea9d77439f82577d514777afd2b7cff92de43bdf2f84432b4297b74d8"} Oct 01 14:40:00 crc kubenswrapper[4605]: I1001 14:40:00.866534 4605 generic.go:334] "Generic (PLEG): container finished" podID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerID="99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b" exitCode=0 Oct 01 14:40:00 crc kubenswrapper[4605]: I1001 14:40:00.866589 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5lgfm" event={"ID":"2c60439f-07bf-4032-8fe8-f98044a402e9","Type":"ContainerDied","Data":"99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b"} Oct 01 14:40:01 crc kubenswrapper[4605]: I1001 14:40:01.878421 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5lgfm" event={"ID":"2c60439f-07bf-4032-8fe8-f98044a402e9","Type":"ContainerStarted","Data":"4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194"} Oct 01 14:40:01 crc kubenswrapper[4605]: I1001 14:40:01.899210 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5lgfm" podStartSLOduration=3.459183449 podStartE2EDuration="5.899190939s" podCreationTimestamp="2025-10-01 14:39:56 +0000 UTC" firstStartedPulling="2025-10-01 14:39:58.84968581 +0000 UTC m=+3321.593662018" lastFinishedPulling="2025-10-01 14:40:01.2896933 +0000 UTC m=+3324.033669508" observedRunningTime="2025-10-01 14:40:01.895882956 +0000 UTC m=+3324.639859164" watchObservedRunningTime="2025-10-01 14:40:01.899190939 +0000 UTC m=+3324.643167147" Oct 01 14:40:07 crc kubenswrapper[4605]: I1001 
14:40:07.090034 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:40:07 crc kubenswrapper[4605]: I1001 14:40:07.090589 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:40:07 crc kubenswrapper[4605]: I1001 14:40:07.154768 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:40:07 crc kubenswrapper[4605]: I1001 14:40:07.974404 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:40:08 crc kubenswrapper[4605]: I1001 14:40:08.021222 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5lgfm"] Oct 01 14:40:09 crc kubenswrapper[4605]: I1001 14:40:09.946974 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5lgfm" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="registry-server" containerID="cri-o://4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194" gracePeriod=2 Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.597575 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.699131 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-utilities\") pod \"2c60439f-07bf-4032-8fe8-f98044a402e9\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.699188 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-catalog-content\") pod \"2c60439f-07bf-4032-8fe8-f98044a402e9\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.699255 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzq4n\" (UniqueName: \"kubernetes.io/projected/2c60439f-07bf-4032-8fe8-f98044a402e9-kube-api-access-hzq4n\") pod \"2c60439f-07bf-4032-8fe8-f98044a402e9\" (UID: \"2c60439f-07bf-4032-8fe8-f98044a402e9\") " Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.700606 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-utilities" (OuterVolumeSpecName: "utilities") pod "2c60439f-07bf-4032-8fe8-f98044a402e9" (UID: "2c60439f-07bf-4032-8fe8-f98044a402e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.711590 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c60439f-07bf-4032-8fe8-f98044a402e9-kube-api-access-hzq4n" (OuterVolumeSpecName: "kube-api-access-hzq4n") pod "2c60439f-07bf-4032-8fe8-f98044a402e9" (UID: "2c60439f-07bf-4032-8fe8-f98044a402e9"). InnerVolumeSpecName "kube-api-access-hzq4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.729955 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c60439f-07bf-4032-8fe8-f98044a402e9" (UID: "2c60439f-07bf-4032-8fe8-f98044a402e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.801272 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.801326 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c60439f-07bf-4032-8fe8-f98044a402e9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.801339 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzq4n\" (UniqueName: \"kubernetes.io/projected/2c60439f-07bf-4032-8fe8-f98044a402e9-kube-api-access-hzq4n\") on node \"crc\" DevicePath \"\"" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.958412 4605 generic.go:334] "Generic (PLEG): container finished" podID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerID="4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194" exitCode=0 Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.958469 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5lgfm" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.958487 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5lgfm" event={"ID":"2c60439f-07bf-4032-8fe8-f98044a402e9","Type":"ContainerDied","Data":"4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194"} Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.959450 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5lgfm" event={"ID":"2c60439f-07bf-4032-8fe8-f98044a402e9","Type":"ContainerDied","Data":"f053d34ea9d77439f82577d514777afd2b7cff92de43bdf2f84432b4297b74d8"} Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:10.959471 4605 scope.go:117] "RemoveContainer" containerID="4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.017372 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5lgfm"] Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.024360 4605 scope.go:117] "RemoveContainer" containerID="99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.033019 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5lgfm"] Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.068360 4605 scope.go:117] "RemoveContainer" containerID="fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.130660 4605 scope.go:117] "RemoveContainer" containerID="4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194" Oct 01 14:40:11 crc kubenswrapper[4605]: E1001 14:40:11.131145 4605 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194\": container with ID starting with 4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194 not found: ID does not exist" containerID="4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.131168 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194"} err="failed to get container status \"4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194\": rpc error: code = NotFound desc = could not find container \"4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194\": container with ID starting with 4736dafb0b7266c2fdebed60797fb94ee4c96908f4081ac9a38a61b615915194 not found: ID does not exist" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.131187 4605 scope.go:117] "RemoveContainer" containerID="99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b" Oct 01 14:40:11 crc kubenswrapper[4605]: E1001 14:40:11.131499 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b\": container with ID starting with 99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b not found: ID does not exist" containerID="99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.131515 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b"} err="failed to get container status \"99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b\": rpc error: code = NotFound desc = could not find container \"99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b\": container with ID starting with 99e46979d16f7cd6d8932ce48b6384369a56491eebeb0d133bddb35fcfe3167b not found: ID does not exist" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.131528 4605 scope.go:117] "RemoveContainer" containerID="fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44" Oct 01 14:40:11 crc kubenswrapper[4605]: E1001 14:40:11.131683 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44\": container with ID starting with fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44 not found: ID does not exist" containerID="fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.131698 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44"} err="failed to get container status \"fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44\": rpc error: code = NotFound desc = could not find container \"fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44\": container with ID starting with fb09290f3617a2df7d49bff4c802acda5fb0a0a63cca4a9840eada2447557c44 not found: ID does not exist" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.717480 4605 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-pwm5c_16842335-be0d-4f69-b0af-e98b21c572ab/control-plane-machine-set-operator/0.log" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.936692 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" path="/var/lib/kubelet/pods/2c60439f-07bf-4032-8fe8-f98044a402e9/volumes" Oct 01 14:40:11 crc kubenswrapper[4605]: I1001 14:40:11.971199 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k22x_762e836a-1722-4e01-982d-023b84748aa4/kube-rbac-proxy/0.log" Oct 01 14:40:12 crc kubenswrapper[4605]: I1001 14:40:12.000529 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k22x_762e836a-1722-4e01-982d-023b84748aa4/machine-api-operator/0.log" Oct 01 14:40:23 crc kubenswrapper[4605]: I1001 14:40:23.278534 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-rt7m9_ce49c1e2-5b54-442f-9c7c-5242886a218a/cert-manager-controller/0.log" Oct 01 14:40:23 crc kubenswrapper[4605]: I1001 14:40:23.467548 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-7cs6w_365338db-6cc8-4281-a9a8-665a9c64a850/cert-manager-cainjector/0.log" Oct 01 14:40:23 crc kubenswrapper[4605]: I1001 14:40:23.500570 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-478v7_0fccd814-2572-49cb-b325-549214e05fc2/cert-manager-webhook/0.log" Oct 01 14:40:34 crc kubenswrapper[4605]: I1001 14:40:34.647245 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-w9zqn_cceecdf3-965b-4939-a871-628e73d1ce1e/nmstate-console-plugin/0.log" Oct 01 14:40:34 crc kubenswrapper[4605]: I1001 14:40:34.821785 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-k28lm_8f826dfa-48c9-42ed-8f62-e3ae00653a07/kube-rbac-proxy/0.log" Oct 01 14:40:34 crc kubenswrapper[4605]: I1001 14:40:34.824264 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-txcp9_d9977c55-dbd0-45e6-8483-5b0e5a279566/nmstate-handler/0.log" Oct 01 14:40:34 crc kubenswrapper[4605]: I1001 14:40:34.913251 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-k28lm_8f826dfa-48c9-42ed-8f62-e3ae00653a07/nmstate-metrics/0.log" Oct 01 14:40:35 crc kubenswrapper[4605]: I1001 14:40:35.037225 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-mmf2p_fc364b6e-b66d-4634-890a-f2eaed00901e/nmstate-operator/0.log" Oct 01 14:40:35 crc kubenswrapper[4605]: I1001 14:40:35.131213 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-dgsw9_f75cf276-8bcb-4ab4-bed4-d12b1252691f/nmstate-webhook/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.283080 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jlb75_c9b364bc-86c5-4b79-b38a-d8bbf447be04/kube-rbac-proxy/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.458326 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jlb75_c9b364bc-86c5-4b79-b38a-d8bbf447be04/controller/0.log" Oct 01 
14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.545763 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.747018 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.772543 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.772977 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.775141 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:40:48 crc kubenswrapper[4605]: I1001 14:40:48.992215 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.022991 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.070198 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.085833 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.278371 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.295501 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.338412 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.339789 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/controller/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.535209 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/frr-metrics/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.577564 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/kube-rbac-proxy/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.595728 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/kube-rbac-proxy-frr/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.789807 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/reloader/0.log" Oct 01 14:40:49 crc kubenswrapper[4605]: I1001 14:40:49.835119 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-bxrk6_3c7dd5c0-7119-4a6a-838d-a41cc422a655/frr-k8s-webhook-server/0.log" Oct 01 14:40:50 crc kubenswrapper[4605]: I1001 14:40:50.202921 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-b7f675c95-dx7nr_3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c/manager/0.log" Oct 01 14:40:50 crc kubenswrapper[4605]: I1001 14:40:50.279894 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6b6874df99-s8qkk_d59274d2-8c52-4997-8e43-aab1e6f5ddd0/webhook-server/0.log" Oct 01 14:40:50 crc kubenswrapper[4605]: I1001 14:40:50.480775 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xrzw2_369f1f43-8697-402d-a370-f72c820ddf13/kube-rbac-proxy/0.log" Oct 01 14:40:50 crc kubenswrapper[4605]: I1001 14:40:50.600051 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/frr/0.log" Oct 01 14:40:50 crc kubenswrapper[4605]: I1001 14:40:50.885777 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xrzw2_369f1f43-8697-402d-a370-f72c820ddf13/speaker/0.log" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.726196 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rh5sq"] Oct 01 14:40:59 crc kubenswrapper[4605]: E1001 14:40:59.727024 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="registry-server" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.727037 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="registry-server" Oct 01 14:40:59 crc kubenswrapper[4605]: E1001 14:40:59.727068 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="extract-utilities" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.727077 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="extract-utilities" Oct 01 14:40:59 crc kubenswrapper[4605]: E1001 14:40:59.727145 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="extract-content" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.727152 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="extract-content" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.727364 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c60439f-07bf-4032-8fe8-f98044a402e9" containerName="registry-server" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.728682 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.737368 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rh5sq"] Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.855435 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzvrf\" (UniqueName: \"kubernetes.io/projected/0632cad6-19b5-434a-b670-e3afe01e3143-kube-api-access-fzvrf\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.856117 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-catalog-content\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.856239 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-utilities\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.963036 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-catalog-content\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.963136 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-utilities\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.963428 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzvrf\" (UniqueName: \"kubernetes.io/projected/0632cad6-19b5-434a-b670-e3afe01e3143-kube-api-access-fzvrf\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.964412 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-catalog-content\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:40:59 crc kubenswrapper[4605]: I1001 14:40:59.964691 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-utilities\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:00 crc kubenswrapper[4605]: I1001 14:41:00.003842 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fzvrf\" (UniqueName: \"kubernetes.io/projected/0632cad6-19b5-434a-b670-e3afe01e3143-kube-api-access-fzvrf\") pod \"redhat-operators-rh5sq\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:00 crc kubenswrapper[4605]: I1001 14:41:00.049034 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:00 crc kubenswrapper[4605]: I1001 14:41:00.512967 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rh5sq"] Oct 01 14:41:01 crc kubenswrapper[4605]: I1001 14:41:01.414528 4605 generic.go:334] "Generic (PLEG): container finished" podID="0632cad6-19b5-434a-b670-e3afe01e3143" containerID="109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385" exitCode=0 Oct 01 14:41:01 crc kubenswrapper[4605]: I1001 14:41:01.414709 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerDied","Data":"109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385"} Oct 01 14:41:01 crc kubenswrapper[4605]: I1001 14:41:01.415181 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerStarted","Data":"550ecb0198c452cbfe12a7fad55b47f1d7476542fee7609cdd73b5869ef5f2d6"} Oct 01 14:41:02 crc kubenswrapper[4605]: I1001 14:41:02.574752 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/util/0.log" Oct 01 14:41:02 crc kubenswrapper[4605]: I1001 14:41:02.774186 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/util/0.log" Oct 01 14:41:02 crc kubenswrapper[4605]: I1001 14:41:02.803716 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/pull/0.log" Oct 01 14:41:02 crc kubenswrapper[4605]: I1001 14:41:02.814210 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/pull/0.log" Oct 01 14:41:02 crc kubenswrapper[4605]: I1001 14:41:02.932546 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/util/0.log" Oct 01 14:41:02 crc kubenswrapper[4605]: I1001 14:41:02.989203 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/extract/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.001376 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/pull/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.133907 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-utilities/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.448827 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerStarted","Data":"a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8"} Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.513453 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-content/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.539184 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-utilities/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.580466 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-content/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.709083 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-utilities/0.log" Oct 01 14:41:03 crc kubenswrapper[4605]: I1001 14:41:03.726344 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-content/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.137014 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-utilities/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.287778 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-content/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.288516 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-utilities/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.290481 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/registry-server/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.385810 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-content/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.652379 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-content/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.660989 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-utilities/0.log" Oct 01 14:41:04 crc kubenswrapper[4605]: I1001 14:41:04.990343 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/util/0.log" Oct 01 
14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.188970 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/util/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.221426 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/pull/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.272762 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/pull/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.569394 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/util/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.632588 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/pull/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.658510 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/extract/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.749323 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/registry-server/0.log" Oct 01 14:41:05 crc kubenswrapper[4605]: I1001 14:41:05.918030 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fhshk_78062175-5452-4b18-96df-c602188693fb/marketplace-operator/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.335671 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-utilities/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.448954 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-utilities/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.521658 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-content/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.537757 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-content/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.773173 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-content/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.814165 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-utilities/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.880162 4605 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/registry-server/0.log" Oct 01 14:41:06 crc kubenswrapper[4605]: I1001 14:41:06.911162 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-utilities/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.096704 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-content/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.133724 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-utilities/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.154854 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-content/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.381748 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-utilities/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.388577 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-content/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.441502 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rh5sq_0632cad6-19b5-434a-b670-e3afe01e3143/extract-utilities/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.704643 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/registry-server/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.784012 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rh5sq_0632cad6-19b5-434a-b670-e3afe01e3143/extract-utilities/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.802024 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rh5sq_0632cad6-19b5-434a-b670-e3afe01e3143/extract-content/0.log" Oct 01 14:41:07 crc kubenswrapper[4605]: I1001 14:41:07.838554 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rh5sq_0632cad6-19b5-434a-b670-e3afe01e3143/extract-content/0.log" Oct 01 14:41:08 crc kubenswrapper[4605]: I1001 14:41:08.012050 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rh5sq_0632cad6-19b5-434a-b670-e3afe01e3143/extract-utilities/0.log" Oct 01 14:41:08 crc kubenswrapper[4605]: I1001 14:41:08.017328 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rh5sq_0632cad6-19b5-434a-b670-e3afe01e3143/extract-content/0.log" Oct 01 14:41:08 crc kubenswrapper[4605]: I1001 14:41:08.487549 4605 generic.go:334] "Generic (PLEG): container finished" podID="0632cad6-19b5-434a-b670-e3afe01e3143" containerID="a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8" exitCode=0 Oct 01 14:41:08 crc kubenswrapper[4605]: I1001 14:41:08.487599 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerDied","Data":"a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8"} Oct 01 14:41:09 crc kubenswrapper[4605]: I1001 14:41:09.498032 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerStarted","Data":"544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a"} Oct 01 14:41:09 crc kubenswrapper[4605]: I1001 14:41:09.525084 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rh5sq" podStartSLOduration=3.004061051 podStartE2EDuration="10.525068159s" podCreationTimestamp="2025-10-01 14:40:59 +0000 UTC" firstStartedPulling="2025-10-01 14:41:01.417531719 +0000 UTC m=+3384.161507927" lastFinishedPulling="2025-10-01 14:41:08.938538837 +0000 UTC m=+3391.682515035" observedRunningTime="2025-10-01 14:41:09.518025002 +0000 UTC m=+3392.262001200" watchObservedRunningTime="2025-10-01 14:41:09.525068159 +0000 UTC m=+3392.269044367" Oct 01 14:41:10 crc kubenswrapper[4605]: I1001 14:41:10.049656 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:10 crc kubenswrapper[4605]: I1001 14:41:10.051768 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:11 crc kubenswrapper[4605]: I1001 14:41:11.097329 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rh5sq" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="registry-server" probeResult="failure" output=< Oct 01 14:41:11 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:41:11 crc kubenswrapper[4605]: > Oct 01 14:41:21 crc kubenswrapper[4605]: I1001 14:41:21.097027 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rh5sq" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="registry-server" probeResult="failure" output=< Oct 01 14:41:21 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:41:21 crc kubenswrapper[4605]: > Oct 01 14:41:30 crc kubenswrapper[4605]: I1001 14:41:30.105456 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:30 crc kubenswrapper[4605]: I1001 14:41:30.188099 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:30 crc kubenswrapper[4605]: I1001 14:41:30.941666 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rh5sq"] Oct 01 14:41:31 crc kubenswrapper[4605]: I1001 14:41:31.669811 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rh5sq" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="registry-server" containerID="cri-o://544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a" gracePeriod=2 Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.191293 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.283422 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-catalog-content\") pod \"0632cad6-19b5-434a-b670-e3afe01e3143\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.283521 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-utilities\") pod \"0632cad6-19b5-434a-b670-e3afe01e3143\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.283558 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzvrf\" (UniqueName: \"kubernetes.io/projected/0632cad6-19b5-434a-b670-e3afe01e3143-kube-api-access-fzvrf\") pod \"0632cad6-19b5-434a-b670-e3afe01e3143\" (UID: \"0632cad6-19b5-434a-b670-e3afe01e3143\") " Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.284470 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-utilities" (OuterVolumeSpecName: "utilities") pod "0632cad6-19b5-434a-b670-e3afe01e3143" (UID: "0632cad6-19b5-434a-b670-e3afe01e3143"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.289280 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0632cad6-19b5-434a-b670-e3afe01e3143-kube-api-access-fzvrf" (OuterVolumeSpecName: "kube-api-access-fzvrf") pod "0632cad6-19b5-434a-b670-e3afe01e3143" (UID: "0632cad6-19b5-434a-b670-e3afe01e3143"). InnerVolumeSpecName "kube-api-access-fzvrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.391252 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.391289 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzvrf\" (UniqueName: \"kubernetes.io/projected/0632cad6-19b5-434a-b670-e3afe01e3143-kube-api-access-fzvrf\") on node \"crc\" DevicePath \"\"" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.410280 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0632cad6-19b5-434a-b670-e3afe01e3143" (UID: "0632cad6-19b5-434a-b670-e3afe01e3143"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.493063 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0632cad6-19b5-434a-b670-e3afe01e3143-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.679081 4605 generic.go:334] "Generic (PLEG): container finished" podID="0632cad6-19b5-434a-b670-e3afe01e3143" containerID="544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a" exitCode=0 Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.679127 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerDied","Data":"544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a"} Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.679212 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rh5sq" event={"ID":"0632cad6-19b5-434a-b670-e3afe01e3143","Type":"ContainerDied","Data":"550ecb0198c452cbfe12a7fad55b47f1d7476542fee7609cdd73b5869ef5f2d6"} Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.679238 4605 scope.go:117] "RemoveContainer" containerID="544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.679770 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rh5sq" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.706862 4605 scope.go:117] "RemoveContainer" containerID="a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.712359 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rh5sq"] Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.720130 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rh5sq"] Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.731415 4605 scope.go:117] "RemoveContainer" containerID="109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.782537 4605 scope.go:117] "RemoveContainer" containerID="544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a" Oct 01 14:41:32 crc kubenswrapper[4605]: E1001 14:41:32.783215 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a\": container with ID starting with 544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a not found: ID does not exist" containerID="544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.783305 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a"} err="failed to get container status \"544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a\": rpc error: code = NotFound desc = could not find container \"544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a\": container with ID starting with 544b1842b08940786673406ffe56276a395bbb3a0ad5b243cf46d84f1f40a00a not found: ID does not exist" Oct 01 14:41:32 crc 
kubenswrapper[4605]: I1001 14:41:32.783376 4605 scope.go:117] "RemoveContainer" containerID="a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8" Oct 01 14:41:32 crc kubenswrapper[4605]: E1001 14:41:32.791626 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8\": container with ID starting with a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8 not found: ID does not exist" containerID="a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.791796 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8"} err="failed to get container status \"a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8\": rpc error: code = NotFound desc = could not find container \"a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8\": container with ID starting with a61116daffa794cd3424b11d55127394c8ccfa3cdea5ab4de48d81faf6bddac8 not found: ID does not exist" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.791869 4605 scope.go:117] "RemoveContainer" containerID="109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385" Oct 01 14:41:32 crc kubenswrapper[4605]: E1001 14:41:32.792189 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385\": container with ID starting with 109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385 not found: ID does not exist" containerID="109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385" Oct 01 14:41:32 crc kubenswrapper[4605]: I1001 14:41:32.792269 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385"} err="failed to get container status \"109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385\": rpc error: code = NotFound desc = could not find container \"109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385\": container with ID starting with 109b5dca09b71f3cd28e83fa8fa51a3d01a664c89b3e2f3d7a85c60ee7562385 not found: ID does not exist" Oct 01 14:41:33 crc kubenswrapper[4605]: I1001 14:41:33.938740 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" path="/var/lib/kubelet/pods/0632cad6-19b5-434a-b670-e3afe01e3143/volumes" Oct 01 14:41:51 crc kubenswrapper[4605]: I1001 14:41:51.634000 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:41:51 crc kubenswrapper[4605]: I1001 14:41:51.635000 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:42:21 crc kubenswrapper[4605]: I1001 14:42:21.630607 4605 patch_prober.go:28] interesting 
Oct 01 14:41:51 crc kubenswrapper[4605]: I1001 14:41:51.635000 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:42:21 crc kubenswrapper[4605]: I1001 14:42:21.630607 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 14:42:21 crc kubenswrapper[4605]: I1001 14:42:21.631053 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:42:51 crc kubenswrapper[4605]: I1001 14:42:51.632266 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 14:42:51 crc kubenswrapper[4605]: I1001 14:42:51.632875 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:42:51 crc kubenswrapper[4605]: I1001 14:42:51.632924 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7"
Oct 01 14:42:51 crc kubenswrapper[4605]: I1001 14:42:51.633660 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d8795f6535dab4a31d73cbcc308fbff0269900c90034691649da24daf04c865a"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 14:42:51 crc kubenswrapper[4605]: I1001 14:42:51.633717 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://d8795f6535dab4a31d73cbcc308fbff0269900c90034691649da24daf04c865a" gracePeriod=600
Oct 01 14:42:52 crc kubenswrapper[4605]: I1001 14:42:52.415776 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="d8795f6535dab4a31d73cbcc308fbff0269900c90034691649da24daf04c865a" exitCode=0
Oct 01 14:42:52 crc kubenswrapper[4605]: I1001 14:42:52.415816 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"d8795f6535dab4a31d73cbcc308fbff0269900c90034691649da24daf04c865a"}
Oct 01 14:42:52 crc kubenswrapper[4605]: I1001 14:42:52.416163 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2"}
Oct 01 14:42:52 crc kubenswrapper[4605]: I1001 14:42:52.416184 4605 scope.go:117] "RemoveContainer" containerID="65c5b0f77ed3d625b4751ed14a86e021ac1ed482b076d853d9e9fd4b1ec77e97"
Oct 01 14:43:20 crc kubenswrapper[4605]: I1001 14:43:20.736724 4605 generic.go:334] "Generic (PLEG): container finished" podID="3487f361-3e39-465e-9993-d6829b809a5d" containerID="e24f115d16ddff72497b4c0f2d5dafb99d16a5147464408d71e4035235a5d774" exitCode=0
Oct 01 14:43:20 crc kubenswrapper[4605]: I1001 14:43:20.736822 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" event={"ID":"3487f361-3e39-465e-9993-d6829b809a5d","Type":"ContainerDied","Data":"e24f115d16ddff72497b4c0f2d5dafb99d16a5147464408d71e4035235a5d774"}
Oct 01 14:43:20 crc kubenswrapper[4605]: I1001 14:43:20.738856 4605 scope.go:117] "RemoveContainer" containerID="e24f115d16ddff72497b4c0f2d5dafb99d16a5147464408d71e4035235a5d774"
Oct 01 14:43:21 crc kubenswrapper[4605]: I1001 14:43:21.180860 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xb4pj_must-gather-wrnp4_3487f361-3e39-465e-9993-d6829b809a5d/gather/0.log"
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.211396 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xb4pj/must-gather-wrnp4"]
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.212181 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xb4pj/must-gather-wrnp4" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="copy" containerID="cri-o://23dd8fdc84b7177126e8a641c838288c9e175aa04ddf5972a1a16124b1372c56" gracePeriod=2
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.221745 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xb4pj/must-gather-wrnp4"]
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.855712 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xb4pj_must-gather-wrnp4_3487f361-3e39-465e-9993-d6829b809a5d/copy/0.log"
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.855833 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xb4pj_must-gather-wrnp4_3487f361-3e39-465e-9993-d6829b809a5d/copy/0.log"
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.856330 4605 generic.go:334] "Generic (PLEG): container finished" podID="3487f361-3e39-465e-9993-d6829b809a5d" containerID="23dd8fdc84b7177126e8a641c838288c9e175aa04ddf5972a1a16124b1372c56" exitCode=143
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.856372 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="094c4a60f8353dc84d986354bc56cdd1466da9c206376ed9ec1fd0e0c262dbae"
Oct 01 14:43:30 crc kubenswrapper[4605]: I1001 14:43:30.856530 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/must-gather-wrnp4"
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.023931 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3487f361-3e39-465e-9993-d6829b809a5d-must-gather-output\") pod \"3487f361-3e39-465e-9993-d6829b809a5d\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") "
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.024388 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nd6lr\" (UniqueName: \"kubernetes.io/projected/3487f361-3e39-465e-9993-d6829b809a5d-kube-api-access-nd6lr\") pod \"3487f361-3e39-465e-9993-d6829b809a5d\" (UID: \"3487f361-3e39-465e-9993-d6829b809a5d\") "
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.032478 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3487f361-3e39-465e-9993-d6829b809a5d-kube-api-access-nd6lr" (OuterVolumeSpecName: "kube-api-access-nd6lr") pod "3487f361-3e39-465e-9993-d6829b809a5d" (UID: "3487f361-3e39-465e-9993-d6829b809a5d"). InnerVolumeSpecName "kube-api-access-nd6lr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.127421 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nd6lr\" (UniqueName: \"kubernetes.io/projected/3487f361-3e39-465e-9993-d6829b809a5d-kube-api-access-nd6lr\") on node \"crc\" DevicePath \"\""
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.194480 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3487f361-3e39-465e-9993-d6829b809a5d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "3487f361-3e39-465e-9993-d6829b809a5d" (UID: "3487f361-3e39-465e-9993-d6829b809a5d"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.228828 4605 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3487f361-3e39-465e-9993-d6829b809a5d-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.864278 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xb4pj/must-gather-wrnp4"
Oct 01 14:43:31 crc kubenswrapper[4605]: I1001 14:43:31.937772 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3487f361-3e39-465e-9993-d6829b809a5d" path="/var/lib/kubelet/pods/3487f361-3e39-465e-9993-d6829b809a5d/volumes"
Oct 01 14:43:52 crc kubenswrapper[4605]: I1001 14:43:52.234421 4605 scope.go:117] "RemoveContainer" containerID="e2a56dad9214cb32a5bff0db2ddad77046a12c206510641b4f084eca4017efe6"
Oct 01 14:43:52 crc kubenswrapper[4605]: I1001 14:43:52.259795 4605 scope.go:117] "RemoveContainer" containerID="e24f115d16ddff72497b4c0f2d5dafb99d16a5147464408d71e4035235a5d774"
Oct 01 14:43:52 crc kubenswrapper[4605]: I1001 14:43:52.319052 4605 scope.go:117] "RemoveContainer" containerID="23dd8fdc84b7177126e8a641c838288c9e175aa04ddf5972a1a16124b1372c56"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.094708 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nsl9m"]
Oct 01 14:44:04 crc kubenswrapper[4605]: E1001 14:44:04.095870 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="copy"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.095895 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="copy"
Oct 01 14:44:04 crc kubenswrapper[4605]: E1001 14:44:04.095925 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="extract-content"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.095934 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="extract-content"
Oct 01 14:44:04 crc kubenswrapper[4605]: E1001 14:44:04.095951 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="registry-server"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.095959 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="registry-server"
Oct 01 14:44:04 crc kubenswrapper[4605]: E1001 14:44:04.095976 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="gather"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.095986 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="gather"
Oct 01 14:44:04 crc kubenswrapper[4605]: E1001 14:44:04.096012 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="extract-utilities"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.096021 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="extract-utilities"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.096318 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="0632cad6-19b5-434a-b670-e3afe01e3143" containerName="registry-server"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.096336 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="gather"
Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.096363 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="3487f361-3e39-465e-9993-d6829b809a5d" containerName="copy"
containerName="copy" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.098157 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.117280 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsl9m"] Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.250432 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-utilities\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.250505 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7f58\" (UniqueName: \"kubernetes.io/projected/06cc68a8-cbe3-4669-979d-06cae0702b50-kube-api-access-x7f58\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.250664 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-catalog-content\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.352118 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-utilities\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.352182 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7f58\" (UniqueName: \"kubernetes.io/projected/06cc68a8-cbe3-4669-979d-06cae0702b50-kube-api-access-x7f58\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.352321 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-catalog-content\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.352851 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-utilities\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.353130 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-catalog-content\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " 
pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.371917 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7f58\" (UniqueName: \"kubernetes.io/projected/06cc68a8-cbe3-4669-979d-06cae0702b50-kube-api-access-x7f58\") pod \"community-operators-nsl9m\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") " pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:04 crc kubenswrapper[4605]: I1001 14:44:04.426360 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:05 crc kubenswrapper[4605]: I1001 14:44:05.023311 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsl9m"] Oct 01 14:44:05 crc kubenswrapper[4605]: I1001 14:44:05.143543 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerStarted","Data":"6311ec0a28cf915c6d77b32d42f697a42427c05074a1de8766555cb25b648791"} Oct 01 14:44:06 crc kubenswrapper[4605]: I1001 14:44:06.151758 4605 generic.go:334] "Generic (PLEG): container finished" podID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerID="76509d03b2d263f167a6fc1de9496c47e42980e978a0e31caf617f644c1f8384" exitCode=0 Oct 01 14:44:06 crc kubenswrapper[4605]: I1001 14:44:06.151815 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerDied","Data":"76509d03b2d263f167a6fc1de9496c47e42980e978a0e31caf617f644c1f8384"} Oct 01 14:44:06 crc kubenswrapper[4605]: I1001 14:44:06.154271 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:44:08 crc kubenswrapper[4605]: I1001 14:44:08.172795 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerStarted","Data":"6806f36a5b3177f38c3f59ee8e5cc49058e097b4dfa4e40eca6ffc97238e9068"} Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.205171 4605 generic.go:334] "Generic (PLEG): container finished" podID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerID="6806f36a5b3177f38c3f59ee8e5cc49058e097b4dfa4e40eca6ffc97238e9068" exitCode=0 Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.205286 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerDied","Data":"6806f36a5b3177f38c3f59ee8e5cc49058e097b4dfa4e40eca6ffc97238e9068"} Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.355469 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4kxmw/must-gather-8j6bn"] Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.356964 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.377424 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4kxmw/must-gather-8j6bn"] Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.378714 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4kxmw"/"kube-root-ca.crt" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.378904 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-4kxmw"/"openshift-service-ca.crt" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.404663 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8dmm\" (UniqueName: \"kubernetes.io/projected/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-kube-api-access-h8dmm\") pod \"must-gather-8j6bn\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.404818 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-must-gather-output\") pod \"must-gather-8j6bn\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.506139 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-must-gather-output\") pod \"must-gather-8j6bn\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.506236 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8dmm\" (UniqueName: \"kubernetes.io/projected/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-kube-api-access-h8dmm\") pod \"must-gather-8j6bn\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.506665 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-must-gather-output\") pod \"must-gather-8j6bn\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.531057 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8dmm\" (UniqueName: \"kubernetes.io/projected/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-kube-api-access-h8dmm\") pod \"must-gather-8j6bn\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:11 crc kubenswrapper[4605]: I1001 14:44:11.679604 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:44:12 crc kubenswrapper[4605]: I1001 14:44:12.215859 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerStarted","Data":"9d000d5a035327e21fb0cccf8de1f6a68d6f4c9cffd6d8d72bf0d43f0c92a1ec"} Oct 01 14:44:12 crc kubenswrapper[4605]: I1001 14:44:12.253538 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nsl9m" podStartSLOduration=2.595637152 podStartE2EDuration="8.253516049s" podCreationTimestamp="2025-10-01 14:44:04 +0000 UTC" firstStartedPulling="2025-10-01 14:44:06.153932555 +0000 UTC m=+3568.897908773" lastFinishedPulling="2025-10-01 14:44:11.811811472 +0000 UTC m=+3574.555787670" observedRunningTime="2025-10-01 14:44:12.237434484 +0000 UTC m=+3574.981410702" watchObservedRunningTime="2025-10-01 14:44:12.253516049 +0000 UTC m=+3574.997492257" Oct 01 14:44:12 crc kubenswrapper[4605]: I1001 14:44:12.277856 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-4kxmw/must-gather-8j6bn"] Oct 01 14:44:13 crc kubenswrapper[4605]: I1001 14:44:13.224681 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" event={"ID":"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86","Type":"ContainerStarted","Data":"3adee77015422ceaf3c1ef4878f473915d1fc4afc2e51bac3c3fd96c8be29e51"} Oct 01 14:44:13 crc kubenswrapper[4605]: I1001 14:44:13.224937 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" event={"ID":"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86","Type":"ContainerStarted","Data":"5e1ca03101d6585c2176a2d1218da4fa4ba12d06d2e52650199ebf3c3fdeecc3"} Oct 01 14:44:13 crc kubenswrapper[4605]: I1001 14:44:13.224952 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" event={"ID":"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86","Type":"ContainerStarted","Data":"5576199bfae388c5ecb05aeb4dd43ad6d757883238b3bde5513d3116b818e71f"} Oct 01 14:44:13 crc kubenswrapper[4605]: I1001 14:44:13.327632 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" podStartSLOduration=2.32760489 podStartE2EDuration="2.32760489s" podCreationTimestamp="2025-10-01 14:44:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:44:13.257619949 +0000 UTC m=+3576.001596157" watchObservedRunningTime="2025-10-01 14:44:13.32760489 +0000 UTC m=+3576.071581098" Oct 01 14:44:14 crc kubenswrapper[4605]: I1001 14:44:14.427199 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:14 crc kubenswrapper[4605]: I1001 14:44:14.427247 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nsl9m" Oct 01 14:44:15 crc kubenswrapper[4605]: I1001 14:44:15.482560 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-nsl9m" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="registry-server" probeResult="failure" output=< Oct 01 14:44:15 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:44:15 crc kubenswrapper[4605]: > Oct 01 
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.389238 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-pqgr9"]
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.391375 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.393997 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4kxmw"/"default-dockercfg-lncvf"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.432527 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vkfj\" (UniqueName: \"kubernetes.io/projected/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-kube-api-access-2vkfj\") pod \"crc-debug-pqgr9\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") " pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.432569 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-host\") pod \"crc-debug-pqgr9\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") " pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.534066 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vkfj\" (UniqueName: \"kubernetes.io/projected/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-kube-api-access-2vkfj\") pod \"crc-debug-pqgr9\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") " pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.534139 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-host\") pod \"crc-debug-pqgr9\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") " pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.534316 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-host\") pod \"crc-debug-pqgr9\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") " pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.561302 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vkfj\" (UniqueName: \"kubernetes.io/projected/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-kube-api-access-2vkfj\") pod \"crc-debug-pqgr9\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") " pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:16 crc kubenswrapper[4605]: I1001 14:44:16.709362 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:44:17 crc kubenswrapper[4605]: I1001 14:44:17.280765 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9" event={"ID":"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0","Type":"ContainerStarted","Data":"ccbd25a81b423e652a742023578f0f25a058374af9af64600a62c2459880cc18"}
Oct 01 14:44:17 crc kubenswrapper[4605]: I1001 14:44:17.280985 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9" event={"ID":"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0","Type":"ContainerStarted","Data":"ee61e022ddf9c80dc374ea38f55dcce456043f24c31c16f4befc96e3ae50b268"}
Oct 01 14:44:17 crc kubenswrapper[4605]: I1001 14:44:17.303731 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9" podStartSLOduration=1.30370539 podStartE2EDuration="1.30370539s" podCreationTimestamp="2025-10-01 14:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:44:17.295946684 +0000 UTC m=+3580.039922892" watchObservedRunningTime="2025-10-01 14:44:17.30370539 +0000 UTC m=+3580.047681598"
Oct 01 14:44:24 crc kubenswrapper[4605]: I1001 14:44:24.479234 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nsl9m"
Oct 01 14:44:24 crc kubenswrapper[4605]: I1001 14:44:24.532663 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nsl9m"
Oct 01 14:44:24 crc kubenswrapper[4605]: I1001 14:44:24.714562 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nsl9m"]
Oct 01 14:44:26 crc kubenswrapper[4605]: I1001 14:44:26.369958 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nsl9m" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="registry-server" containerID="cri-o://9d000d5a035327e21fb0cccf8de1f6a68d6f4c9cffd6d8d72bf0d43f0c92a1ec" gracePeriod=2
Oct 01 14:44:27 crc kubenswrapper[4605]: I1001 14:44:27.381084 4605 generic.go:334] "Generic (PLEG): container finished" podID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerID="9d000d5a035327e21fb0cccf8de1f6a68d6f4c9cffd6d8d72bf0d43f0c92a1ec" exitCode=0
Oct 01 14:44:27 crc kubenswrapper[4605]: I1001 14:44:27.381165 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerDied","Data":"9d000d5a035327e21fb0cccf8de1f6a68d6f4c9cffd6d8d72bf0d43f0c92a1ec"}
Oct 01 14:44:27 crc kubenswrapper[4605]: I1001 14:44:27.381412 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsl9m" event={"ID":"06cc68a8-cbe3-4669-979d-06cae0702b50","Type":"ContainerDied","Data":"6311ec0a28cf915c6d77b32d42f697a42427c05074a1de8766555cb25b648791"}
Oct 01 14:44:27 crc kubenswrapper[4605]: I1001 14:44:27.381431 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6311ec0a28cf915c6d77b32d42f697a42427c05074a1de8766555cb25b648791"
Oct 01 14:44:27 crc kubenswrapper[4605]: I1001 14:44:27.920607 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsl9m"
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.080657 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-utilities\") pod \"06cc68a8-cbe3-4669-979d-06cae0702b50\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") "
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.081173 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-catalog-content\") pod \"06cc68a8-cbe3-4669-979d-06cae0702b50\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") "
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.081356 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7f58\" (UniqueName: \"kubernetes.io/projected/06cc68a8-cbe3-4669-979d-06cae0702b50-kube-api-access-x7f58\") pod \"06cc68a8-cbe3-4669-979d-06cae0702b50\" (UID: \"06cc68a8-cbe3-4669-979d-06cae0702b50\") "
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.081701 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-utilities" (OuterVolumeSpecName: "utilities") pod "06cc68a8-cbe3-4669-979d-06cae0702b50" (UID: "06cc68a8-cbe3-4669-979d-06cae0702b50"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.081934 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-utilities\") on node \"crc\" DevicePath \"\""
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.107208 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06cc68a8-cbe3-4669-979d-06cae0702b50-kube-api-access-x7f58" (OuterVolumeSpecName: "kube-api-access-x7f58") pod "06cc68a8-cbe3-4669-979d-06cae0702b50" (UID: "06cc68a8-cbe3-4669-979d-06cae0702b50"). InnerVolumeSpecName "kube-api-access-x7f58". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.155401 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06cc68a8-cbe3-4669-979d-06cae0702b50" (UID: "06cc68a8-cbe3-4669-979d-06cae0702b50"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.186384 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06cc68a8-cbe3-4669-979d-06cae0702b50-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.186426 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7f58\" (UniqueName: \"kubernetes.io/projected/06cc68a8-cbe3-4669-979d-06cae0702b50-kube-api-access-x7f58\") on node \"crc\" DevicePath \"\""
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.390108 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsl9m"
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.446059 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nsl9m"]
Oct 01 14:44:28 crc kubenswrapper[4605]: I1001 14:44:28.463752 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nsl9m"]
Oct 01 14:44:29 crc kubenswrapper[4605]: I1001 14:44:29.936684 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" path="/var/lib/kubelet/pods/06cc68a8-cbe3-4669-979d-06cae0702b50/volumes"
Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.193117 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp"]
Oct 01 14:45:00 crc kubenswrapper[4605]: E1001 14:45:00.194245 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="registry-server"
Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.194258 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="registry-server"
Oct 01 14:45:00 crc kubenswrapper[4605]: E1001 14:45:00.194290 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="extract-content"
Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.194297 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="extract-content"
Oct 01 14:45:00 crc kubenswrapper[4605]: E1001 14:45:00.194308 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="extract-utilities"
Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.194314 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="extract-utilities"
Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.194548 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="06cc68a8-cbe3-4669-979d-06cae0702b50" containerName="registry-server"
Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.195191 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.199022 4605 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.199417 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.219107 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp"] Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.269075 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lff4b\" (UniqueName: \"kubernetes.io/projected/470ca42d-555a-4e87-8387-520d65563663-kube-api-access-lff4b\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.269464 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/470ca42d-555a-4e87-8387-520d65563663-config-volume\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.269619 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/470ca42d-555a-4e87-8387-520d65563663-secret-volume\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.371016 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/470ca42d-555a-4e87-8387-520d65563663-config-volume\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.371107 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/470ca42d-555a-4e87-8387-520d65563663-secret-volume\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.371231 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lff4b\" (UniqueName: \"kubernetes.io/projected/470ca42d-555a-4e87-8387-520d65563663-kube-api-access-lff4b\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.373507 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/470ca42d-555a-4e87-8387-520d65563663-config-volume\") pod 
\"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.383190 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/470ca42d-555a-4e87-8387-520d65563663-secret-volume\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.396495 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lff4b\" (UniqueName: \"kubernetes.io/projected/470ca42d-555a-4e87-8387-520d65563663-kube-api-access-lff4b\") pod \"collect-profiles-29322165-7jgsp\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:00 crc kubenswrapper[4605]: I1001 14:45:00.518941 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:01 crc kubenswrapper[4605]: I1001 14:45:01.105808 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp"] Oct 01 14:45:01 crc kubenswrapper[4605]: I1001 14:45:01.753114 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" event={"ID":"470ca42d-555a-4e87-8387-520d65563663","Type":"ContainerStarted","Data":"be947812721b04352f08dca497534feb00d453e95f260b33cdac4667bfc7c921"} Oct 01 14:45:01 crc kubenswrapper[4605]: I1001 14:45:01.753683 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" event={"ID":"470ca42d-555a-4e87-8387-520d65563663","Type":"ContainerStarted","Data":"eeca8e371687c738d8333edbf83693e6e344bf06eaea69bdb23b2fef633b9bdc"} Oct 01 14:45:01 crc kubenswrapper[4605]: I1001 14:45:01.802174 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" podStartSLOduration=1.802152343 podStartE2EDuration="1.802152343s" podCreationTimestamp="2025-10-01 14:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 14:45:01.769208714 +0000 UTC m=+3624.513184922" watchObservedRunningTime="2025-10-01 14:45:01.802152343 +0000 UTC m=+3624.546128551" Oct 01 14:45:02 crc kubenswrapper[4605]: I1001 14:45:02.762757 4605 generic.go:334] "Generic (PLEG): container finished" podID="470ca42d-555a-4e87-8387-520d65563663" containerID="be947812721b04352f08dca497534feb00d453e95f260b33cdac4667bfc7c921" exitCode=0 Oct 01 14:45:02 crc kubenswrapper[4605]: I1001 14:45:02.762958 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" event={"ID":"470ca42d-555a-4e87-8387-520d65563663","Type":"ContainerDied","Data":"be947812721b04352f08dca497534feb00d453e95f260b33cdac4667bfc7c921"} Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.151334 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.261947 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/470ca42d-555a-4e87-8387-520d65563663-secret-volume\") pod \"470ca42d-555a-4e87-8387-520d65563663\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.262023 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/470ca42d-555a-4e87-8387-520d65563663-config-volume\") pod \"470ca42d-555a-4e87-8387-520d65563663\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.262137 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lff4b\" (UniqueName: \"kubernetes.io/projected/470ca42d-555a-4e87-8387-520d65563663-kube-api-access-lff4b\") pod \"470ca42d-555a-4e87-8387-520d65563663\" (UID: \"470ca42d-555a-4e87-8387-520d65563663\") " Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.263053 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/470ca42d-555a-4e87-8387-520d65563663-config-volume" (OuterVolumeSpecName: "config-volume") pod "470ca42d-555a-4e87-8387-520d65563663" (UID: "470ca42d-555a-4e87-8387-520d65563663"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.263808 4605 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/470ca42d-555a-4e87-8387-520d65563663-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.269060 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/470ca42d-555a-4e87-8387-520d65563663-kube-api-access-lff4b" (OuterVolumeSpecName: "kube-api-access-lff4b") pod "470ca42d-555a-4e87-8387-520d65563663" (UID: "470ca42d-555a-4e87-8387-520d65563663"). InnerVolumeSpecName "kube-api-access-lff4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.279043 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/470ca42d-555a-4e87-8387-520d65563663-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "470ca42d-555a-4e87-8387-520d65563663" (UID: "470ca42d-555a-4e87-8387-520d65563663"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.365577 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lff4b\" (UniqueName: \"kubernetes.io/projected/470ca42d-555a-4e87-8387-520d65563663-kube-api-access-lff4b\") on node \"crc\" DevicePath \"\"" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.365616 4605 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/470ca42d-555a-4e87-8387-520d65563663-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.782322 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" event={"ID":"470ca42d-555a-4e87-8387-520d65563663","Type":"ContainerDied","Data":"eeca8e371687c738d8333edbf83693e6e344bf06eaea69bdb23b2fef633b9bdc"} Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.782646 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eeca8e371687c738d8333edbf83693e6e344bf06eaea69bdb23b2fef633b9bdc" Oct 01 14:45:04 crc kubenswrapper[4605]: I1001 14:45:04.782371 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29322165-7jgsp" Oct 01 14:45:05 crc kubenswrapper[4605]: I1001 14:45:05.242380 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q"] Oct 01 14:45:05 crc kubenswrapper[4605]: I1001 14:45:05.250006 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29322120-gl44q"] Oct 01 14:45:05 crc kubenswrapper[4605]: I1001 14:45:05.938977 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51dbd5d6-7c1d-4116-86d6-49d5108524cd" path="/var/lib/kubelet/pods/51dbd5d6-7c1d-4116-86d6-49d5108524cd/volumes" Oct 01 14:45:21 crc kubenswrapper[4605]: I1001 14:45:21.631268 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 14:45:21 crc kubenswrapper[4605]: I1001 14:45:21.631918 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 14:45:26 crc kubenswrapper[4605]: I1001 14:45:26.852929 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-59b677b5cd-92trn_b92ac133-ded4-4276-a43a-7d9414d051ab/barbican-api-log/0.log" Oct 01 14:45:26 crc kubenswrapper[4605]: I1001 14:45:26.907857 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-59b677b5cd-92trn_b92ac133-ded4-4276-a43a-7d9414d051ab/barbican-api/0.log" Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.161400 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-66797cbc6-zrzb7_991f1482-e0da-43ae-89c5-bb9a5beaee2f/barbican-keystone-listener/0.log" Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.381927 4605 log.go:25] "Finished 
Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.428433 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-798f8d7567-pfjfn_6f8f7068-62be-4ee7-9f6a-63812a2f5413/barbican-worker/0.log"
Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.533742 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-798f8d7567-pfjfn_6f8f7068-62be-4ee7-9f6a-63812a2f5413/barbican-worker-log/0.log"
Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.708803 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-z9rb2_b1ad20ed-a28e-4787-ba19-58f0d1a1e5b6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.887413 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/ceilometer-central-agent/0.log"
Oct 01 14:45:27 crc kubenswrapper[4605]: I1001 14:45:27.924174 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/ceilometer-notification-agent/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.037152 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/proxy-httpd/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.101465 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_be76fa59-fd75-498e-9168-fa355659b827/sg-core/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.327154 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6a6bf8ea-cc99-43f4-913c-59039f5e11db/cinder-api/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.334228 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6a6bf8ea-cc99-43f4-913c-59039f5e11db/cinder-api-log/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.593702 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_675a2ec1-82ad-4b20-a077-d8d427108ce7/probe/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.604966 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_675a2ec1-82ad-4b20-a077-d8d427108ce7/cinder-scheduler/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.803452 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-9pc94_31cfc16a-0d93-4cc4-9281-e4cee9664772/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:28 crc kubenswrapper[4605]: I1001 14:45:28.943217 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-b6ldt_84e4e275-5c28-4ea8-bf23-154b3aaa036d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:29 crc kubenswrapper[4605]: I1001 14:45:29.143449 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-g6xmd_283ccb8d-6321-440f-a0a6-f2118a4f9bf5/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:29 crc kubenswrapper[4605]: I1001 14:45:29.299076 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bcf8b9d95-kpkj4_cfe22295-abd7-4094-b93e-3fb24d38242c/init/0.log"
Oct 01 14:45:29 crc kubenswrapper[4605]: I1001 14:45:29.569301 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bcf8b9d95-kpkj4_cfe22295-abd7-4094-b93e-3fb24d38242c/init/0.log"
Oct 01 14:45:29 crc kubenswrapper[4605]: I1001 14:45:29.609550 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bcf8b9d95-kpkj4_cfe22295-abd7-4094-b93e-3fb24d38242c/dnsmasq-dns/0.log"
Oct 01 14:45:29 crc kubenswrapper[4605]: I1001 14:45:29.833290 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-8c89v_88958218-6061-4e38-b6fd-88b9502ebf30/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.047434 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_eac8da6a-ca40-4b05-b525-d645a20f3592/glance-httpd/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.188868 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_eac8da6a-ca40-4b05-b525-d645a20f3592/glance-log/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.229788 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6/glance-httpd/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.395416 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2cbd5fa3-f41d-4f68-a0f3-ef876a17a0b6/glance-log/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.597556 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64d6df575b-5ctbf_80fb1c51-bd86-4896-8dac-59747473f066/horizon/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.732182 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-7mpwp_3187f065-743c-4531-93b0-12c666bdd4c3/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.813883 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64d6df575b-5ctbf_80fb1c51-bd86-4896-8dac-59747473f066/horizon-log/0.log"
Oct 01 14:45:30 crc kubenswrapper[4605]: I1001 14:45:30.934258 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-95t2p_0a383a44-66c3-466b-977f-4297fa2f9718/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:31 crc kubenswrapper[4605]: I1001 14:45:31.258105 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_d74d1f54-092e-4bd7-90f4-e6ad8e4a77ea/kube-state-metrics/0.log"
Oct 01 14:45:31 crc kubenswrapper[4605]: I1001 14:45:31.265977 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6964b49dc5-fgw45_471f90e3-9942-4516-ad5a-26cddd148bd4/keystone-api/0.log"
Oct 01 14:45:31 crc kubenswrapper[4605]: I1001 14:45:31.443226 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-tlk9z_9df7ad58-d542-4c8a-89fb-464689d1729c/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:31 crc kubenswrapper[4605]: I1001 14:45:31.736943 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-f85558977-q9rhp_67c3654e-3eed-4260-8864-3ab0334a32a0/neutron-api/0.log"
Oct 01 14:45:31 crc kubenswrapper[4605]: I1001 14:45:31.850746 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-f85558977-q9rhp_67c3654e-3eed-4260-8864-3ab0334a32a0/neutron-httpd/0.log"
Oct 01 14:45:31 crc kubenswrapper[4605]: I1001 14:45:31.996481 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wgm6n_338fec8e-40aa-4170-9f63-dd6ae6607d2d/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:32 crc kubenswrapper[4605]: I1001 14:45:32.391154 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_885f1ef5-027b-49b0-9c25-444a307d3075/nova-api-log/0.log"
Oct 01 14:45:32 crc kubenswrapper[4605]: I1001 14:45:32.708274 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_885f1ef5-027b-49b0-9c25-444a307d3075/nova-api-api/0.log"
Oct 01 14:45:32 crc kubenswrapper[4605]: I1001 14:45:32.779370 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c93e48d5-b3b1-4390-bb46-308151d80e4e/nova-cell0-conductor-conductor/0.log"
Oct 01 14:45:33 crc kubenswrapper[4605]: I1001 14:45:33.160290 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_02fa0bb4-5a25-43da-8f92-a8c0ca715032/nova-cell1-novncproxy-novncproxy/0.log"
Oct 01 14:45:33 crc kubenswrapper[4605]: I1001 14:45:33.227513 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_97d9b4b0-ddd6-46ce-9c48-add8f2f3e0b2/nova-cell1-conductor-conductor/0.log"
Oct 01 14:45:33 crc kubenswrapper[4605]: I1001 14:45:33.494419 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-v8zqd_07ba7a39-5510-4075-b789-aa61ef2643f5/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:33 crc kubenswrapper[4605]: I1001 14:45:33.811131 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_05f7dd13-a8f8-4263-ad0f-87d5972c6eb0/nova-metadata-log/0.log"
Oct 01 14:45:34 crc kubenswrapper[4605]: I1001 14:45:34.256559 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_3c7bfc3c-d34b-4bcf-ba1c-71ed87c74927/nova-scheduler-scheduler/0.log"
Oct 01 14:45:34 crc kubenswrapper[4605]: I1001 14:45:34.452334 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c414b65e-0cce-4d58-aa5d-08d0679595cd/mysql-bootstrap/0.log"
Oct 01 14:45:34 crc kubenswrapper[4605]: I1001 14:45:34.629764 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c414b65e-0cce-4d58-aa5d-08d0679595cd/mysql-bootstrap/0.log"
Oct 01 14:45:34 crc kubenswrapper[4605]: I1001 14:45:34.830218 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c414b65e-0cce-4d58-aa5d-08d0679595cd/galera/0.log"
Oct 01 14:45:34 crc kubenswrapper[4605]: I1001 14:45:34.955790 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_05f7dd13-a8f8-4263-ad0f-87d5972c6eb0/nova-metadata-metadata/0.log"
Oct 01 14:45:35 crc kubenswrapper[4605]: I1001 14:45:35.094287 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e/mysql-bootstrap/0.log"
Oct 01 14:45:35 crc kubenswrapper[4605]: I1001 14:45:35.248854 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e/mysql-bootstrap/0.log"
Oct 01 14:45:35 crc kubenswrapper[4605]: I1001 14:45:35.322449 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e3e2c30f-0d23-4f2a-8cc5-5bacf9bcce9e/galera/0.log"
Oct 01 14:45:35 crc kubenswrapper[4605]: I1001 14:45:35.548619 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_58264b9f-ddeb-466d-94aa-536c1a381308/openstackclient/0.log"
Oct 01 14:45:35 crc kubenswrapper[4605]: I1001 14:45:35.761667 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jvb44_a37367ae-0e7b-4ad1-afb4-c48ca6282706/ovn-controller/0.log"
Oct 01 14:45:35 crc kubenswrapper[4605]: I1001 14:45:35.927207 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ppfrt_2f65a22d-7b5c-43b7-88dc-c94ce75bf8b4/openstack-network-exporter/0.log"
Oct 01 14:45:36 crc kubenswrapper[4605]: I1001 14:45:36.193274 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovsdb-server-init/0.log"
Oct 01 14:45:36 crc kubenswrapper[4605]: I1001 14:45:36.359723 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovsdb-server-init/0.log"
Oct 01 14:45:36 crc kubenswrapper[4605]: I1001 14:45:36.471287 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovs-vswitchd/0.log"
Oct 01 14:45:36 crc kubenswrapper[4605]: I1001 14:45:36.494142 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-csqtk_9e9376a7-1282-4f2c-b437-bf6eb57d2739/ovsdb-server/0.log"
Oct 01 14:45:36 crc kubenswrapper[4605]: I1001 14:45:36.834546 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-x2z69_64e9c2e1-759a-4bb7-9fd4-56190af5f1b9/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.038756 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e2f8bf30-b59a-4564-b6d2-5f201b0fe957/openstack-network-exporter/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.096203 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e2f8bf30-b59a-4564-b6d2-5f201b0fe957/ovn-northd/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.321749 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_70acc9bd-54b2-4c70-bf3f-ce66a88bbd06/openstack-network-exporter/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.458079 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_70acc9bd-54b2-4c70-bf3f-ce66a88bbd06/ovsdbserver-nb/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.601773 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e3eaff33-3a5d-4868-ba47-a03e7ac13ab5/openstack-network-exporter/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.756788 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e3eaff33-3a5d-4868-ba47-a03e7ac13ab5/ovsdbserver-sb/0.log"
Oct 01 14:45:37 crc kubenswrapper[4605]: I1001 14:45:37.963213 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cc6b8b7dd-f8khf_2524f72b-05b3-4299-90d6-4671b410d59a/placement-api/0.log"
Oct 01 14:45:38 crc kubenswrapper[4605]: I1001 14:45:38.131842 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5cc6b8b7dd-f8khf_2524f72b-05b3-4299-90d6-4671b410d59a/placement-log/0.log"
Oct 01 14:45:38 crc kubenswrapper[4605]: I1001 14:45:38.302237 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9d1c480b-5fd5-4134-913c-19381d8f4db4/setup-container/0.log"
Oct 01 14:45:38 crc kubenswrapper[4605]: I1001 14:45:38.403268 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9d1c480b-5fd5-4134-913c-19381d8f4db4/setup-container/0.log"
Oct 01 14:45:38 crc kubenswrapper[4605]: I1001 14:45:38.524217 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9d1c480b-5fd5-4134-913c-19381d8f4db4/rabbitmq/0.log"
Oct 01 14:45:38 crc kubenswrapper[4605]: I1001 14:45:38.926677 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1b035ab1-17f0-4d9e-91d4-983b4cd06469/setup-container/0.log"
Oct 01 14:45:39 crc kubenswrapper[4605]: I1001 14:45:39.186352 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1b035ab1-17f0-4d9e-91d4-983b4cd06469/setup-container/0.log"
Oct 01 14:45:39 crc kubenswrapper[4605]: I1001 14:45:39.209701 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_1b035ab1-17f0-4d9e-91d4-983b4cd06469/rabbitmq/0.log"
Oct 01 14:45:39 crc kubenswrapper[4605]: I1001 14:45:39.445626 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-n4v8b_0f112a96-c395-4ae9-8960-596266eb98b0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:39 crc kubenswrapper[4605]: I1001 14:45:39.682669 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-tps9f_6e328f7a-3f9b-48c7-b277-cf0f99b9bf86/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:39 crc kubenswrapper[4605]: I1001 14:45:39.989907 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-5lfmw_0ce2e0e7-1cfa-4fcb-87d3-214503a56fff/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:39 crc kubenswrapper[4605]: I1001 14:45:39.991696 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-s4rg2_9cd5ee34-51d2-4a40-9312-e83bf07927b7/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:40 crc kubenswrapper[4605]: I1001 14:45:40.282400 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-ql62n_375a2afa-b804-4227-8137-eb7c5c56d8fb/ssh-known-hosts-edpm-deployment/0.log"
Oct 01 14:45:40 crc kubenswrapper[4605]: I1001 14:45:40.540799 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-66b5967899-cv4c4_7ae58440-10a5-44a6-94e8-89d112c67651/proxy-server/0.log"
Oct 01 14:45:40 crc kubenswrapper[4605]: I1001 14:45:40.708806 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-66b5967899-cv4c4_7ae58440-10a5-44a6-94e8-89d112c67651/proxy-httpd/0.log"
Oct 01 14:45:40 crc kubenswrapper[4605]: I1001 14:45:40.915608 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-x8zcm_4c3df3b9-829b-4ebb-9593-487b1f6ddce1/swift-ring-rebalance/0.log"
Oct 01 14:45:40 crc kubenswrapper[4605]: I1001 14:45:40.971498 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-auditor/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.197073 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-reaper/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.294894 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-replicator/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.307769 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/account-server/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.487257 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-auditor/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.590869 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-replicator/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.620754 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-server/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.838432 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/container-updater/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.904244 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-auditor/0.log"
Oct 01 14:45:41 crc kubenswrapper[4605]: I1001 14:45:41.951531 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-expirer/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.183058 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-replicator/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.257651 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-server/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.258663 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/object-updater/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.474943 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/rsync/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.500387 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_fedbab19-fa82-4d92-b787-de85226cd34f/swift-recon-cron/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.822897 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-6bd7l_58600359-0fa8-4801-a1d3-87598ba13651/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:42 crc kubenswrapper[4605]: I1001 14:45:42.927296 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2de51de5-4325-49f8-9179-f18e4de5fd46/tempest-tests-tempest-tests-runner/0.log"
Oct 01 14:45:43 crc kubenswrapper[4605]: I1001 14:45:43.141388 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_be5d33d0-2abd-424f-a518-f5eb5aa62661/test-operator-logs-container/0.log"
Oct 01 14:45:43 crc kubenswrapper[4605]: I1001 14:45:43.512089 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-5jnc8_94e0445e-6b97-4b10-80a3-5d8827ce0120/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 01 14:45:50 crc kubenswrapper[4605]: I1001 14:45:50.680472 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_cfe08201-447e-4697-95cb-dfdf59dfdbe9/memcached/0.log"
Oct 01 14:45:51 crc kubenswrapper[4605]: I1001 14:45:51.631272 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 14:45:51 crc kubenswrapper[4605]: I1001 14:45:51.631325 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:45:52 crc kubenswrapper[4605]: I1001 14:45:52.480220 4605 scope.go:117] "RemoveContainer" containerID="5b95fcac86feb6959cd5dc4750bc7a500dc0438b98356893f5bf9f4ee54d15cc"
Oct 01 14:45:52 crc kubenswrapper[4605]: I1001 14:45:52.522674 4605 scope.go:117] "RemoveContainer" containerID="b17679075d0a75aff5d9b6f3542463f5f5c63fd64e7c6b6bd7fe90e5a670c1fb"
Oct 01 14:46:21 crc kubenswrapper[4605]: I1001 14:46:21.631007 4605 patch_prober.go:28] interesting pod/machine-config-daemon-zdjh7 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 01 14:46:21 crc kubenswrapper[4605]: I1001 14:46:21.631557 4605 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 01 14:46:21 crc kubenswrapper[4605]: I1001 14:46:21.631601 4605 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7"
Oct 01 14:46:21 crc kubenswrapper[4605]: I1001 14:46:21.632337 4605 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2"} pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 01 14:46:21 crc kubenswrapper[4605]: I1001 14:46:21.632389 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerName="machine-config-daemon" containerID="cri-o://47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" gracePeriod=600
Oct 01 14:46:21 crc kubenswrapper[4605]: E1001 14:46:21.794420 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:46:22 crc kubenswrapper[4605]: I1001 14:46:22.485558 4605 generic.go:334] "Generic (PLEG): container finished" podID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" exitCode=0
Oct 01 14:46:22 crc kubenswrapper[4605]: I1001 14:46:22.485606 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerDied","Data":"47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2"}
Oct 01 14:46:22 crc kubenswrapper[4605]: I1001 14:46:22.485952 4605 scope.go:117] "RemoveContainer" containerID="d8795f6535dab4a31d73cbcc308fbff0269900c90034691649da24daf04c865a"
Oct 01 14:46:22 crc kubenswrapper[4605]: I1001 14:46:22.487389 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2"
Oct 01 14:46:22 crc kubenswrapper[4605]: E1001 14:46:22.487899 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:46:27 crc kubenswrapper[4605]: I1001 14:46:27.531411 4605 generic.go:334] "Generic (PLEG): container finished" podID="f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" containerID="ccbd25a81b423e652a742023578f0f25a058374af9af64600a62c2459880cc18" exitCode=0
Oct 01 14:46:27 crc kubenswrapper[4605]: I1001 14:46:27.531554 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9" event={"ID":"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0","Type":"ContainerDied","Data":"ccbd25a81b423e652a742023578f0f25a058374af9af64600a62c2459880cc18"}
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.652171 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.688664 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-pqgr9"]
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.696347 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-pqgr9"]
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.763884 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-host\") pod \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") "
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.764028 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vkfj\" (UniqueName: \"kubernetes.io/projected/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-kube-api-access-2vkfj\") pod \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\" (UID: \"f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0\") "
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.764045 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-host" (OuterVolumeSpecName: "host") pod "f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" (UID: "f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.764575 4605 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-host\") on node \"crc\" DevicePath \"\""
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.770381 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-kube-api-access-2vkfj" (OuterVolumeSpecName: "kube-api-access-2vkfj") pod "f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" (UID: "f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0"). InnerVolumeSpecName "kube-api-access-2vkfj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:46:28 crc kubenswrapper[4605]: I1001 14:46:28.867332 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vkfj\" (UniqueName: \"kubernetes.io/projected/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0-kube-api-access-2vkfj\") on node \"crc\" DevicePath \"\""
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.549951 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee61e022ddf9c80dc374ea38f55dcce456043f24c31c16f4befc96e3ae50b268"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.550044 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-pqgr9"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.939426 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" path="/var/lib/kubelet/pods/f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0/volumes"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.947445 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-jz65f"]
Oct 01 14:46:29 crc kubenswrapper[4605]: E1001 14:46:29.947951 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" containerName="container-00"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.947967 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" containerName="container-00"
Oct 01 14:46:29 crc kubenswrapper[4605]: E1001 14:46:29.947990 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470ca42d-555a-4e87-8387-520d65563663" containerName="collect-profiles"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.947996 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="470ca42d-555a-4e87-8387-520d65563663" containerName="collect-profiles"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.948216 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e68ef6-82ee-4ae2-b12f-980de1bcc8d0" containerName="container-00"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.948232 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="470ca42d-555a-4e87-8387-520d65563663" containerName="collect-profiles"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.948840 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:29 crc kubenswrapper[4605]: I1001 14:46:29.950976 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4kxmw"/"default-dockercfg-lncvf"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.098116 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b464fa5-1787-42ed-a315-dc9e1918f0b6-host\") pod \"crc-debug-jz65f\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") " pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.098213 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lpjt\" (UniqueName: \"kubernetes.io/projected/3b464fa5-1787-42ed-a315-dc9e1918f0b6-kube-api-access-7lpjt\") pod \"crc-debug-jz65f\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") " pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.200377 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b464fa5-1787-42ed-a315-dc9e1918f0b6-host\") pod \"crc-debug-jz65f\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") " pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.200495 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b464fa5-1787-42ed-a315-dc9e1918f0b6-host\") pod \"crc-debug-jz65f\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") " pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.201449 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lpjt\" (UniqueName: \"kubernetes.io/projected/3b464fa5-1787-42ed-a315-dc9e1918f0b6-kube-api-access-7lpjt\") pod \"crc-debug-jz65f\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") " pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.225923 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lpjt\" (UniqueName: \"kubernetes.io/projected/3b464fa5-1787-42ed-a315-dc9e1918f0b6-kube-api-access-7lpjt\") pod \"crc-debug-jz65f\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") " pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.269836 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:30 crc kubenswrapper[4605]: I1001 14:46:30.560272 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-jz65f" event={"ID":"3b464fa5-1787-42ed-a315-dc9e1918f0b6","Type":"ContainerStarted","Data":"421faceceb0e75cd260f66aaca2f5230e519c2428144fded2314024df6f31e27"}
Oct 01 14:46:31 crc kubenswrapper[4605]: I1001 14:46:31.569985 4605 generic.go:334] "Generic (PLEG): container finished" podID="3b464fa5-1787-42ed-a315-dc9e1918f0b6" containerID="3ca0bb5d099b6a7b3821e60f441d7285ee2a1d68b534c52ac2e8b03ac1e9ebd2" exitCode=0
Oct 01 14:46:31 crc kubenswrapper[4605]: I1001 14:46:31.571313 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-jz65f" event={"ID":"3b464fa5-1787-42ed-a315-dc9e1918f0b6","Type":"ContainerDied","Data":"3ca0bb5d099b6a7b3821e60f441d7285ee2a1d68b534c52ac2e8b03ac1e9ebd2"}
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.687864 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.845064 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b464fa5-1787-42ed-a315-dc9e1918f0b6-host\") pod \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") "
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.845236 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b464fa5-1787-42ed-a315-dc9e1918f0b6-host" (OuterVolumeSpecName: "host") pod "3b464fa5-1787-42ed-a315-dc9e1918f0b6" (UID: "3b464fa5-1787-42ed-a315-dc9e1918f0b6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.845372 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lpjt\" (UniqueName: \"kubernetes.io/projected/3b464fa5-1787-42ed-a315-dc9e1918f0b6-kube-api-access-7lpjt\") pod \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\" (UID: \"3b464fa5-1787-42ed-a315-dc9e1918f0b6\") "
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.845934 4605 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b464fa5-1787-42ed-a315-dc9e1918f0b6-host\") on node \"crc\" DevicePath \"\""
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.851469 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b464fa5-1787-42ed-a315-dc9e1918f0b6-kube-api-access-7lpjt" (OuterVolumeSpecName: "kube-api-access-7lpjt") pod "3b464fa5-1787-42ed-a315-dc9e1918f0b6" (UID: "3b464fa5-1787-42ed-a315-dc9e1918f0b6"). InnerVolumeSpecName "kube-api-access-7lpjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 01 14:46:32 crc kubenswrapper[4605]: I1001 14:46:32.947350 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lpjt\" (UniqueName: \"kubernetes.io/projected/3b464fa5-1787-42ed-a315-dc9e1918f0b6-kube-api-access-7lpjt\") on node \"crc\" DevicePath \"\""
Oct 01 14:46:33 crc kubenswrapper[4605]: I1001 14:46:33.602176 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-jz65f" event={"ID":"3b464fa5-1787-42ed-a315-dc9e1918f0b6","Type":"ContainerDied","Data":"421faceceb0e75cd260f66aaca2f5230e519c2428144fded2314024df6f31e27"}
Oct 01 14:46:33 crc kubenswrapper[4605]: I1001 14:46:33.602509 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="421faceceb0e75cd260f66aaca2f5230e519c2428144fded2314024df6f31e27"
Oct 01 14:46:33 crc kubenswrapper[4605]: I1001 14:46:33.602226 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jz65f"
Oct 01 14:46:34 crc kubenswrapper[4605]: I1001 14:46:34.926599 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2"
Oct 01 14:46:34 crc kubenswrapper[4605]: E1001 14:46:34.927146 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f"
Oct 01 14:46:37 crc kubenswrapper[4605]: I1001 14:46:37.451595 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-jz65f"]
Oct 01 14:46:37 crc kubenswrapper[4605]: I1001 14:46:37.459512 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-jz65f"]
Oct 01 14:46:37 crc kubenswrapper[4605]: I1001 14:46:37.942246 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b464fa5-1787-42ed-a315-dc9e1918f0b6" path="/var/lib/kubelet/pods/3b464fa5-1787-42ed-a315-dc9e1918f0b6/volumes"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.658545 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-jfp25"]
Oct 01 14:46:38 crc kubenswrapper[4605]: E1001 14:46:38.659023 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b464fa5-1787-42ed-a315-dc9e1918f0b6" containerName="container-00"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.659038 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b464fa5-1787-42ed-a315-dc9e1918f0b6" containerName="container-00"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.659329 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b464fa5-1787-42ed-a315-dc9e1918f0b6" containerName="container-00"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.660202 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.671677 4605 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-4kxmw"/"default-dockercfg-lncvf"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.746957 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r47t7\" (UniqueName: \"kubernetes.io/projected/57a93731-a990-49bd-abda-ef77afb625d8-kube-api-access-r47t7\") pod \"crc-debug-jfp25\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.747047 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57a93731-a990-49bd-abda-ef77afb625d8-host\") pod \"crc-debug-jfp25\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.848670 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r47t7\" (UniqueName: \"kubernetes.io/projected/57a93731-a990-49bd-abda-ef77afb625d8-kube-api-access-r47t7\") pod \"crc-debug-jfp25\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.848738 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57a93731-a990-49bd-abda-ef77afb625d8-host\") pod \"crc-debug-jfp25\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.848889 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57a93731-a990-49bd-abda-ef77afb625d8-host\") pod \"crc-debug-jfp25\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.866049 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r47t7\" (UniqueName: \"kubernetes.io/projected/57a93731-a990-49bd-abda-ef77afb625d8-kube-api-access-r47t7\") pod \"crc-debug-jfp25\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " pod="openshift-must-gather-4kxmw/crc-debug-jfp25"
Oct 01 14:46:38 crc kubenswrapper[4605]: I1001 14:46:38.986401 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jfp25" Oct 01 14:46:39 crc kubenswrapper[4605]: I1001 14:46:39.656679 4605 generic.go:334] "Generic (PLEG): container finished" podID="57a93731-a990-49bd-abda-ef77afb625d8" containerID="8e242a41e499af3c3e5038da9c3913594ef3060ce2796ed5319205b8f3c7fc1d" exitCode=0 Oct 01 14:46:39 crc kubenswrapper[4605]: I1001 14:46:39.656751 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-jfp25" event={"ID":"57a93731-a990-49bd-abda-ef77afb625d8","Type":"ContainerDied","Data":"8e242a41e499af3c3e5038da9c3913594ef3060ce2796ed5319205b8f3c7fc1d"} Oct 01 14:46:39 crc kubenswrapper[4605]: I1001 14:46:39.657050 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/crc-debug-jfp25" event={"ID":"57a93731-a990-49bd-abda-ef77afb625d8","Type":"ContainerStarted","Data":"659be8e9fedb43f97bcd2dc96d389507e6fcb598ac405ffcd0fe05220c55baff"} Oct 01 14:46:39 crc kubenswrapper[4605]: I1001 14:46:39.727709 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-jfp25"] Oct 01 14:46:39 crc kubenswrapper[4605]: I1001 14:46:39.743658 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4kxmw/crc-debug-jfp25"] Oct 01 14:46:40 crc kubenswrapper[4605]: I1001 14:46:40.761776 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jfp25" Oct 01 14:46:40 crc kubenswrapper[4605]: I1001 14:46:40.903997 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57a93731-a990-49bd-abda-ef77afb625d8-host\") pod \"57a93731-a990-49bd-abda-ef77afb625d8\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " Oct 01 14:46:40 crc kubenswrapper[4605]: I1001 14:46:40.904074 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r47t7\" (UniqueName: \"kubernetes.io/projected/57a93731-a990-49bd-abda-ef77afb625d8-kube-api-access-r47t7\") pod \"57a93731-a990-49bd-abda-ef77afb625d8\" (UID: \"57a93731-a990-49bd-abda-ef77afb625d8\") " Oct 01 14:46:40 crc kubenswrapper[4605]: I1001 14:46:40.904131 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57a93731-a990-49bd-abda-ef77afb625d8-host" (OuterVolumeSpecName: "host") pod "57a93731-a990-49bd-abda-ef77afb625d8" (UID: "57a93731-a990-49bd-abda-ef77afb625d8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 14:46:40 crc kubenswrapper[4605]: I1001 14:46:40.904630 4605 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57a93731-a990-49bd-abda-ef77afb625d8-host\") on node \"crc\" DevicePath \"\"" Oct 01 14:46:40 crc kubenswrapper[4605]: I1001 14:46:40.920358 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a93731-a990-49bd-abda-ef77afb625d8-kube-api-access-r47t7" (OuterVolumeSpecName: "kube-api-access-r47t7") pod "57a93731-a990-49bd-abda-ef77afb625d8" (UID: "57a93731-a990-49bd-abda-ef77afb625d8"). InnerVolumeSpecName "kube-api-access-r47t7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.008825 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r47t7\" (UniqueName: \"kubernetes.io/projected/57a93731-a990-49bd-abda-ef77afb625d8-kube-api-access-r47t7\") on node \"crc\" DevicePath \"\"" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.554832 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/util/0.log" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.672639 4605 scope.go:117] "RemoveContainer" containerID="8e242a41e499af3c3e5038da9c3913594ef3060ce2796ed5319205b8f3c7fc1d" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.672759 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/crc-debug-jfp25" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.788205 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/util/0.log" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.872220 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/pull/0.log" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.882124 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/pull/0.log" Oct 01 14:46:41 crc kubenswrapper[4605]: I1001 14:46:41.936149 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a93731-a990-49bd-abda-ef77afb625d8" path="/var/lib/kubelet/pods/57a93731-a990-49bd-abda-ef77afb625d8/volumes" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.021918 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/pull/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.041815 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/util/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.093218 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1c76bd37ad0c2b23ca2a6f6e5550fa40350084769f929d7952643a875emzcsd_67624804-868e-4db5-9eb8-aaec10c2a4ba/extract/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.189400 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-td7cl_1c52463d-7f43-422b-b6f2-071553e4efb1/kube-rbac-proxy/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.311529 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-td7cl_1c52463d-7f43-422b-b6f2-071553e4efb1/manager/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.336706 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-l6rjg_ddfed60b-8b0b-4481-b9f7-f906dd6413f8/kube-rbac-proxy/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.448812 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-l6rjg_ddfed60b-8b0b-4481-b9f7-f906dd6413f8/manager/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.555469 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-k4j9s_2f85ca51-dac6-464b-8da5-b2b35511c3a7/kube-rbac-proxy/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.558024 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-k4j9s_2f85ca51-dac6-464b-8da5-b2b35511c3a7/manager/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.722298 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-r2t7s_685ab06d-d56b-429c-b196-3f2576a63ad5/kube-rbac-proxy/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.812285 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-r2t7s_685ab06d-d56b-429c-b196-3f2576a63ad5/manager/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.936158 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-bc7cq_6728814d-8d86-4255-8e33-c2205cc3421b/manager/0.log" Oct 01 14:46:42 crc kubenswrapper[4605]: I1001 14:46:42.946420 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-bc7cq_6728814d-8d86-4255-8e33-c2205cc3421b/kube-rbac-proxy/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.107367 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-4gzrq_114ac89a-6b52-4e58-8ec7-1a5ebe953e46/kube-rbac-proxy/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.161407 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-4gzrq_114ac89a-6b52-4e58-8ec7-1a5ebe953e46/manager/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.227264 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-p5vqs_769cd151-8943-4faa-876c-e91d749ef107/kube-rbac-proxy/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.472371 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5cd4858477-sb7tm_a3fbdb59-b188-4842-af73-d3c68afd58ff/kube-rbac-proxy/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.481286 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-p5vqs_769cd151-8943-4faa-876c-e91d749ef107/manager/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.523986 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5cd4858477-sb7tm_a3fbdb59-b188-4842-af73-d3c68afd58ff/manager/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.715561 4605 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-665ff6bffd-fdn7m_416364d4-8fac-4979-b4f1-e1f009f0b8cd/kube-rbac-proxy/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.787257 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-665ff6bffd-fdn7m_416364d4-8fac-4979-b4f1-e1f009f0b8cd/manager/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.866916 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-kxwrs_e578b0b7-de64-4492-9ab3-b8b73ebd0909/kube-rbac-proxy/0.log" Oct 01 14:46:43 crc kubenswrapper[4605]: I1001 14:46:43.908326 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-kxwrs_e578b0b7-de64-4492-9ab3-b8b73ebd0909/manager/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.011711 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-rscdq_5db7ac6b-c1e1-4640-943f-9db9a460e625/kube-rbac-proxy/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.065690 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-rscdq_5db7ac6b-c1e1-4640-943f-9db9a460e625/manager/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.305755 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-849d5b9b84-b2fzm_6ee4d18f-3f02-49c4-943c-534e47601be5/kube-rbac-proxy/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.358975 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-849d5b9b84-b2fzm_6ee4d18f-3f02-49c4-943c-534e47601be5/manager/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.440662 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-64cd67b5cb-xrh9t_21d80fce-11c1-4ca0-8687-dc2bb6ced356/kube-rbac-proxy/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.586651 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b787867f4-cx7q7_f105c6d3-5a2b-442c-ad1c-bcffd3fd869b/kube-rbac-proxy/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.677855 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-64cd67b5cb-xrh9t_21d80fce-11c1-4ca0-8687-dc2bb6ced356/manager/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.721989 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b787867f4-cx7q7_f105c6d3-5a2b-442c-ad1c-bcffd3fd869b/manager/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.882133 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb_4c039eff-8d65-45d5-9c1a-9fddca3c5e57/manager/0.log" Oct 01 14:46:44 crc kubenswrapper[4605]: I1001 14:46:44.883730 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77b9676b8cqd6pb_4c039eff-8d65-45d5-9c1a-9fddca3c5e57/kube-rbac-proxy/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 
14:46:45.036844 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6565f9cdf-zgq4z_7ba2ae30-a4df-43a7-b6bc-89814bd65ab7/kube-rbac-proxy/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.211201 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b5f977c9c-ztrlb_471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a/kube-rbac-proxy/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.510157 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8vx9r_c2be49ad-0f11-4479-b725-29854a0c1b8f/registry-server/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.550336 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5b5f977c9c-ztrlb_471dd8b1-617a-4cba-a9e6-0fc59bfc4b6a/operator/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.675163 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-8sgk8_d861e141-379a-4d47-bca7-bff86972afaa/kube-rbac-proxy/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.842834 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-8sgk8_d861e141-379a-4d47-bca7-bff86972afaa/manager/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.911902 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-tf7ln_97fdc8f2-0472-4957-a59a-fd9474c0d15c/kube-rbac-proxy/0.log" Oct 01 14:46:45 crc kubenswrapper[4605]: I1001 14:46:45.988713 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-tf7ln_97fdc8f2-0472-4957-a59a-fd9474c0d15c/manager/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.107942 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6565f9cdf-zgq4z_7ba2ae30-a4df-43a7-b6bc-89814bd65ab7/manager/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.217950 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-9dhn9_0de7ac6f-ac16-4f83-8e06-10c9b2500491/operator/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.222426 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-84d6b4b759-xdntp_34738360-dd91-4a55-b6d2-ab69d1bb5db4/kube-rbac-proxy/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.348497 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-84d6b4b759-xdntp_34738360-dd91-4a55-b6d2-ab69d1bb5db4/manager/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.421118 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x8fvl_8dcbd1dd-75c6-40ff-a9ea-267f9be92433/kube-rbac-proxy/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.431576 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-x8fvl_8dcbd1dd-75c6-40ff-a9ea-267f9be92433/manager/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: 
I1001 14:46:46.564272 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-85777745bb-8rcr7_6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1/kube-rbac-proxy/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.573900 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-85777745bb-8rcr7_6e215b5d-0b5c-4587-b8ab-f2a63fb41cb1/manager/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.598212 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b9957f54f-ct9tb_baf8d82f-41e9-417a-9e88-4320b65d7c6c/kube-rbac-proxy/0.log" Oct 01 14:46:46 crc kubenswrapper[4605]: I1001 14:46:46.674219 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b9957f54f-ct9tb_baf8d82f-41e9-417a-9e88-4320b65d7c6c/manager/0.log" Oct 01 14:46:48 crc kubenswrapper[4605]: I1001 14:46:48.927160 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:46:48 crc kubenswrapper[4605]: E1001 14:46:48.927604 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:47:01 crc kubenswrapper[4605]: I1001 14:47:01.926657 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:47:01 crc kubenswrapper[4605]: E1001 14:47:01.928651 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:47:03 crc kubenswrapper[4605]: I1001 14:47:03.391758 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-pwm5c_16842335-be0d-4f69-b0af-e98b21c572ab/control-plane-machine-set-operator/0.log" Oct 01 14:47:03 crc kubenswrapper[4605]: I1001 14:47:03.553701 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k22x_762e836a-1722-4e01-982d-023b84748aa4/kube-rbac-proxy/0.log" Oct 01 14:47:03 crc kubenswrapper[4605]: I1001 14:47:03.632448 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k22x_762e836a-1722-4e01-982d-023b84748aa4/machine-api-operator/0.log" Oct 01 14:47:12 crc kubenswrapper[4605]: I1001 14:47:12.927356 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:47:12 crc kubenswrapper[4605]: E1001 14:47:12.927945 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:47:19 crc kubenswrapper[4605]: I1001 14:47:19.799552 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-rt7m9_ce49c1e2-5b54-442f-9c7c-5242886a218a/cert-manager-controller/0.log" Oct 01 14:47:20 crc kubenswrapper[4605]: I1001 14:47:20.029219 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-7cs6w_365338db-6cc8-4281-a9a8-665a9c64a850/cert-manager-cainjector/0.log" Oct 01 14:47:20 crc kubenswrapper[4605]: I1001 14:47:20.203383 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-478v7_0fccd814-2572-49cb-b325-549214e05fc2/cert-manager-webhook/0.log" Oct 01 14:47:26 crc kubenswrapper[4605]: I1001 14:47:26.927165 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:47:26 crc kubenswrapper[4605]: E1001 14:47:26.927728 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:47:34 crc kubenswrapper[4605]: I1001 14:47:34.992938 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-w9zqn_cceecdf3-965b-4939-a871-628e73d1ce1e/nmstate-console-plugin/0.log" Oct 01 14:47:35 crc kubenswrapper[4605]: I1001 14:47:35.245859 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-txcp9_d9977c55-dbd0-45e6-8483-5b0e5a279566/nmstate-handler/0.log" Oct 01 14:47:35 crc kubenswrapper[4605]: I1001 14:47:35.295711 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-k28lm_8f826dfa-48c9-42ed-8f62-e3ae00653a07/kube-rbac-proxy/0.log" Oct 01 14:47:35 crc kubenswrapper[4605]: I1001 14:47:35.379979 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-k28lm_8f826dfa-48c9-42ed-8f62-e3ae00653a07/nmstate-metrics/0.log" Oct 01 14:47:35 crc kubenswrapper[4605]: I1001 14:47:35.458814 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-mmf2p_fc364b6e-b66d-4634-890a-f2eaed00901e/nmstate-operator/0.log" Oct 01 14:47:35 crc kubenswrapper[4605]: I1001 14:47:35.581844 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-dgsw9_f75cf276-8bcb-4ab4-bed4-d12b1252691f/nmstate-webhook/0.log" Oct 01 14:47:37 crc kubenswrapper[4605]: I1001 14:47:37.935847 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:47:37 crc kubenswrapper[4605]: E1001 14:47:37.936414 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:47:49 crc kubenswrapper[4605]: I1001 14:47:49.926751 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:47:49 crc kubenswrapper[4605]: E1001 14:47:49.927548 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:47:49 crc kubenswrapper[4605]: I1001 14:47:49.999347 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jlb75_c9b364bc-86c5-4b79-b38a-d8bbf447be04/kube-rbac-proxy/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.050130 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jlb75_c9b364bc-86c5-4b79-b38a-d8bbf447be04/controller/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.245592 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.405290 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.415106 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.439875 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.443327 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.657443 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.708499 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.748261 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.748678 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.890926 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-frr-files/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.948819 4605 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-reloader/0.log" Oct 01 14:47:50 crc kubenswrapper[4605]: I1001 14:47:50.989061 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/cp-metrics/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.010012 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/controller/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.204065 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/frr-metrics/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.258218 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/kube-rbac-proxy/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.298565 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/kube-rbac-proxy-frr/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.432834 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/reloader/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.594199 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-bxrk6_3c7dd5c0-7119-4a6a-838d-a41cc422a655/frr-k8s-webhook-server/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.900939 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-b7f675c95-dx7nr_3fa2d3ac-c6f5-40c4-96e7-88eaa6d1622c/manager/0.log" Oct 01 14:47:51 crc kubenswrapper[4605]: I1001 14:47:51.980978 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6b6874df99-s8qkk_d59274d2-8c52-4997-8e43-aab1e6f5ddd0/webhook-server/0.log" Oct 01 14:47:52 crc kubenswrapper[4605]: I1001 14:47:52.136891 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xrzw2_369f1f43-8697-402d-a370-f72c820ddf13/kube-rbac-proxy/0.log" Oct 01 14:47:52 crc kubenswrapper[4605]: I1001 14:47:52.292723 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6g94f_3476bcf6-f4c4-4db8-8ec9-b567c7d55872/frr/0.log" Oct 01 14:47:52 crc kubenswrapper[4605]: I1001 14:47:52.595988 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-xrzw2_369f1f43-8697-402d-a370-f72c820ddf13/speaker/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.478950 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/util/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.628940 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/util/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.658701 4605 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/pull/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.727362 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/pull/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.908236 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/extract/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.914119 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/pull/0.log" Oct 01 14:48:03 crc kubenswrapper[4605]: I1001 14:48:03.949458 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcvrh9j_8542ad4d-301a-4957-ab3f-1c305ad1ff43/util/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.098320 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-utilities/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.474165 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-utilities/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.513943 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-content/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.522949 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-content/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.708918 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-content/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.709721 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/extract-utilities/0.log" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.926256 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:48:04 crc kubenswrapper[4605]: E1001 14:48:04.926773 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:48:04 crc kubenswrapper[4605]: I1001 14:48:04.965199 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-utilities/0.log" Oct 01 14:48:05 crc 
kubenswrapper[4605]: I1001 14:48:05.180206 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdmzw_4e85ab9d-cf8d-4814-939c-c779c53bfa45/registry-server/0.log" Oct 01 14:48:05 crc kubenswrapper[4605]: I1001 14:48:05.226765 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-utilities/0.log" Oct 01 14:48:05 crc kubenswrapper[4605]: I1001 14:48:05.270922 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-content/0.log" Oct 01 14:48:05 crc kubenswrapper[4605]: I1001 14:48:05.293653 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-content/0.log" Oct 01 14:48:05 crc kubenswrapper[4605]: I1001 14:48:05.504486 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-utilities/0.log" Oct 01 14:48:05 crc kubenswrapper[4605]: I1001 14:48:05.545431 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/extract-content/0.log" Oct 01 14:48:05 crc kubenswrapper[4605]: I1001 14:48:05.736158 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/util/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.046836 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7gnm7_46973963-c69a-460a-a5c3-3711005e4e00/registry-server/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.093585 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/util/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.128582 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/pull/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.152578 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/pull/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.304632 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/util/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.323512 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/extract/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.407550 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96clpr7_81bb3cba-46ef-47f6-8f79-326bd240dc58/pull/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.548156 4605 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fhshk_78062175-5452-4b18-96df-c602188693fb/marketplace-operator/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.647570 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-utilities/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.819405 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-content/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.844826 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-content/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.869252 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-utilities/0.log" Oct 01 14:48:06 crc kubenswrapper[4605]: I1001 14:48:06.996255 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-utilities/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.019388 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/extract-content/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.154035 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgctj_6a5db63d-46d4-4967-b16e-5ee3222617d0/registry-server/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.224928 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-utilities/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.434994 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-content/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.440998 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-utilities/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.459860 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-content/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.629746 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-content/0.log" Oct 01 14:48:07 crc kubenswrapper[4605]: I1001 14:48:07.630485 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/extract-utilities/0.log" Oct 01 14:48:08 crc kubenswrapper[4605]: I1001 14:48:08.139802 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fw6xk_d5682155-2c45-4654-b77a-75760c61c945/registry-server/0.log" Oct 01 14:48:19 crc kubenswrapper[4605]: I1001 14:48:19.926603 4605 scope.go:117] "RemoveContainer" 
containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:48:19 crc kubenswrapper[4605]: E1001 14:48:19.928191 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:48:33 crc kubenswrapper[4605]: I1001 14:48:33.926706 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:48:33 crc kubenswrapper[4605]: E1001 14:48:33.927322 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:48:48 crc kubenswrapper[4605]: I1001 14:48:48.927164 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:48:48 crc kubenswrapper[4605]: E1001 14:48:48.929802 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:49:02 crc kubenswrapper[4605]: I1001 14:49:02.927295 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:49:02 crc kubenswrapper[4605]: E1001 14:49:02.928175 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:49:13 crc kubenswrapper[4605]: I1001 14:49:13.927133 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:49:13 crc kubenswrapper[4605]: E1001 14:49:13.928461 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:49:26 crc kubenswrapper[4605]: I1001 14:49:26.927407 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:49:26 crc kubenswrapper[4605]: E1001 14:49:26.929076 4605 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:49:41 crc kubenswrapper[4605]: I1001 14:49:41.935114 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:49:41 crc kubenswrapper[4605]: E1001 14:49:41.936554 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.335425 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-28spx"] Oct 01 14:49:55 crc kubenswrapper[4605]: E1001 14:49:55.336826 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57a93731-a990-49bd-abda-ef77afb625d8" containerName="container-00" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.336857 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="57a93731-a990-49bd-abda-ef77afb625d8" containerName="container-00" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.337449 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="57a93731-a990-49bd-abda-ef77afb625d8" containerName="container-00" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.340059 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.350676 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-28spx"] Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.498817 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cqgp\" (UniqueName: \"kubernetes.io/projected/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-kube-api-access-6cqgp\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.498915 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-catalog-content\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.498966 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-utilities\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.600136 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-utilities\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.600265 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cqgp\" (UniqueName: \"kubernetes.io/projected/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-kube-api-access-6cqgp\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.600318 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-catalog-content\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.600786 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-catalog-content\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.601033 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-utilities\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.637891 4605 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6cqgp\" (UniqueName: \"kubernetes.io/projected/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-kube-api-access-6cqgp\") pod \"certified-operators-28spx\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:55 crc kubenswrapper[4605]: I1001 14:49:55.680367 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:49:56 crc kubenswrapper[4605]: I1001 14:49:56.397738 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-28spx"] Oct 01 14:49:56 crc kubenswrapper[4605]: I1001 14:49:56.500497 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerStarted","Data":"0abe5db283144476f31d2d178adc08529aac8cc9c60d40f285a73593ec271aef"} Oct 01 14:49:56 crc kubenswrapper[4605]: I1001 14:49:56.926725 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:49:56 crc kubenswrapper[4605]: E1001 14:49:56.927000 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.523412 4605 generic.go:334] "Generic (PLEG): container finished" podID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerID="31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71" exitCode=0 Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.523468 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerDied","Data":"31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71"} Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.526056 4605 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.727357 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vplqz"] Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.730802 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.742719 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vplqz"] Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.857546 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-catalog-content\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.857638 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tntjt\" (UniqueName: \"kubernetes.io/projected/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-kube-api-access-tntjt\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.857675 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-utilities\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.962299 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tntjt\" (UniqueName: \"kubernetes.io/projected/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-kube-api-access-tntjt\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.962635 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-utilities\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.962858 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-catalog-content\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.963519 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-catalog-content\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.964174 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-utilities\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:57 crc kubenswrapper[4605]: I1001 14:49:57.984650 4605 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tntjt\" (UniqueName: \"kubernetes.io/projected/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-kube-api-access-tntjt\") pod \"redhat-marketplace-vplqz\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:58 crc kubenswrapper[4605]: I1001 14:49:58.076276 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:49:58 crc kubenswrapper[4605]: I1001 14:49:58.535585 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vplqz"] Oct 01 14:49:58 crc kubenswrapper[4605]: I1001 14:49:58.538900 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerStarted","Data":"a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4"} Oct 01 14:49:58 crc kubenswrapper[4605]: W1001 14:49:58.550537 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7757e2f9_9deb_49ad_aa12_eaca4f054ffc.slice/crio-a47f1a5e93f2d444e7b73ca1c8af97eed0a4a24dc49e7798fa61e06fe236ab46 WatchSource:0}: Error finding container a47f1a5e93f2d444e7b73ca1c8af97eed0a4a24dc49e7798fa61e06fe236ab46: Status 404 returned error can't find the container with id a47f1a5e93f2d444e7b73ca1c8af97eed0a4a24dc49e7798fa61e06fe236ab46 Oct 01 14:49:59 crc kubenswrapper[4605]: I1001 14:49:59.548023 4605 generic.go:334] "Generic (PLEG): container finished" podID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerID="a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4" exitCode=0 Oct 01 14:49:59 crc kubenswrapper[4605]: I1001 14:49:59.548054 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerDied","Data":"a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4"} Oct 01 14:49:59 crc kubenswrapper[4605]: I1001 14:49:59.550164 4605 generic.go:334] "Generic (PLEG): container finished" podID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerID="739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f" exitCode=0 Oct 01 14:49:59 crc kubenswrapper[4605]: I1001 14:49:59.550202 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerDied","Data":"739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f"} Oct 01 14:49:59 crc kubenswrapper[4605]: I1001 14:49:59.550225 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerStarted","Data":"a47f1a5e93f2d444e7b73ca1c8af97eed0a4a24dc49e7798fa61e06fe236ab46"} Oct 01 14:50:00 crc kubenswrapper[4605]: I1001 14:50:00.562205 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerStarted","Data":"c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604"} Oct 01 14:50:00 crc kubenswrapper[4605]: I1001 14:50:00.565233 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" 
event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerStarted","Data":"d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102"} Oct 01 14:50:00 crc kubenswrapper[4605]: I1001 14:50:00.593039 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-28spx" podStartSLOduration=3.15765809 podStartE2EDuration="5.593021032s" podCreationTimestamp="2025-10-01 14:49:55 +0000 UTC" firstStartedPulling="2025-10-01 14:49:57.525772887 +0000 UTC m=+3920.269749095" lastFinishedPulling="2025-10-01 14:49:59.961135829 +0000 UTC m=+3922.705112037" observedRunningTime="2025-10-01 14:50:00.588041187 +0000 UTC m=+3923.332017395" watchObservedRunningTime="2025-10-01 14:50:00.593021032 +0000 UTC m=+3923.336997240" Oct 01 14:50:01 crc kubenswrapper[4605]: I1001 14:50:01.577789 4605 generic.go:334] "Generic (PLEG): container finished" podID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerID="d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102" exitCode=0 Oct 01 14:50:01 crc kubenswrapper[4605]: I1001 14:50:01.577879 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerDied","Data":"d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102"} Oct 01 14:50:02 crc kubenswrapper[4605]: I1001 14:50:02.590878 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerStarted","Data":"7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd"} Oct 01 14:50:02 crc kubenswrapper[4605]: I1001 14:50:02.617233 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vplqz" podStartSLOduration=2.867241246 podStartE2EDuration="5.617213737s" podCreationTimestamp="2025-10-01 14:49:57 +0000 UTC" firstStartedPulling="2025-10-01 14:49:59.551611223 +0000 UTC m=+3922.295587431" lastFinishedPulling="2025-10-01 14:50:02.301583714 +0000 UTC m=+3925.045559922" observedRunningTime="2025-10-01 14:50:02.606802685 +0000 UTC m=+3925.350778893" watchObservedRunningTime="2025-10-01 14:50:02.617213737 +0000 UTC m=+3925.361189945" Oct 01 14:50:05 crc kubenswrapper[4605]: I1001 14:50:05.681447 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:50:05 crc kubenswrapper[4605]: I1001 14:50:05.683383 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:50:05 crc kubenswrapper[4605]: I1001 14:50:05.745184 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:50:06 crc kubenswrapper[4605]: I1001 14:50:06.673909 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:50:06 crc kubenswrapper[4605]: I1001 14:50:06.736073 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-28spx"] Oct 01 14:50:08 crc kubenswrapper[4605]: I1001 14:50:08.076914 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:50:08 crc kubenswrapper[4605]: I1001 14:50:08.076956 4605 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:50:08 crc kubenswrapper[4605]: I1001 14:50:08.133049 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:50:08 crc kubenswrapper[4605]: I1001 14:50:08.647772 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-28spx" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="registry-server" containerID="cri-o://c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604" gracePeriod=2 Oct 01 14:50:08 crc kubenswrapper[4605]: I1001 14:50:08.724310 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:50:08 crc kubenswrapper[4605]: I1001 14:50:08.927261 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:50:08 crc kubenswrapper[4605]: E1001 14:50:08.929068 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.138881 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vplqz"] Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.149396 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.297320 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cqgp\" (UniqueName: \"kubernetes.io/projected/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-kube-api-access-6cqgp\") pod \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.297418 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-utilities\") pod \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.297570 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-catalog-content\") pod \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\" (UID: \"7e89b05e-af2c-41fc-8abc-1ff77175b1a4\") " Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.298275 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-utilities" (OuterVolumeSpecName: "utilities") pod "7e89b05e-af2c-41fc-8abc-1ff77175b1a4" (UID: "7e89b05e-af2c-41fc-8abc-1ff77175b1a4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.298474 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.308532 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-kube-api-access-6cqgp" (OuterVolumeSpecName: "kube-api-access-6cqgp") pod "7e89b05e-af2c-41fc-8abc-1ff77175b1a4" (UID: "7e89b05e-af2c-41fc-8abc-1ff77175b1a4"). InnerVolumeSpecName "kube-api-access-6cqgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.365581 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7e89b05e-af2c-41fc-8abc-1ff77175b1a4" (UID: "7e89b05e-af2c-41fc-8abc-1ff77175b1a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.400046 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cqgp\" (UniqueName: \"kubernetes.io/projected/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-kube-api-access-6cqgp\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.400083 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e89b05e-af2c-41fc-8abc-1ff77175b1a4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.662400 4605 generic.go:334] "Generic (PLEG): container finished" podID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerID="c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604" exitCode=0 Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.662491 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerDied","Data":"c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604"} Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.662550 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-28spx" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.662598 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-28spx" event={"ID":"7e89b05e-af2c-41fc-8abc-1ff77175b1a4","Type":"ContainerDied","Data":"0abe5db283144476f31d2d178adc08529aac8cc9c60d40f285a73593ec271aef"} Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.662625 4605 scope.go:117] "RemoveContainer" containerID="c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.712516 4605 scope.go:117] "RemoveContainer" containerID="a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.723111 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-28spx"] Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.735769 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-28spx"] Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.746325 4605 scope.go:117] "RemoveContainer" containerID="31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.793968 4605 scope.go:117] "RemoveContainer" containerID="c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604" Oct 01 14:50:09 crc kubenswrapper[4605]: E1001 14:50:09.794662 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604\": container with ID starting with c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604 not found: ID does not exist" containerID="c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.794700 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604"} err="failed to get container status \"c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604\": rpc error: code = NotFound desc = could not find container \"c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604\": container with ID starting with c672b55d99d7053584562201b855d92927210d90465134aa0282d6ef440d2604 not found: ID does not exist" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.794722 4605 scope.go:117] "RemoveContainer" containerID="a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4" Oct 01 14:50:09 crc kubenswrapper[4605]: E1001 14:50:09.798867 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4\": container with ID starting with a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4 not found: ID does not exist" containerID="a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.799017 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4"} err="failed to get container status \"a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4\": rpc error: code = NotFound desc = could not find 
container \"a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4\": container with ID starting with a685f33b176f449f1b3aecb50c2e231a78b406f4aab7f30523addb78d7820bc4 not found: ID does not exist" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.799122 4605 scope.go:117] "RemoveContainer" containerID="31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71" Oct 01 14:50:09 crc kubenswrapper[4605]: E1001 14:50:09.799526 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71\": container with ID starting with 31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71 not found: ID does not exist" containerID="31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.799550 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71"} err="failed to get container status \"31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71\": rpc error: code = NotFound desc = could not find container \"31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71\": container with ID starting with 31a8018ce083a1e317760c01f3c2cd035c4dddeb4e743857058998e34d26ff71 not found: ID does not exist" Oct 01 14:50:09 crc kubenswrapper[4605]: I1001 14:50:09.942045 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" path="/var/lib/kubelet/pods/7e89b05e-af2c-41fc-8abc-1ff77175b1a4/volumes" Oct 01 14:50:10 crc kubenswrapper[4605]: I1001 14:50:10.672759 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vplqz" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="registry-server" containerID="cri-o://7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd" gracePeriod=2 Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.161389 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.338998 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tntjt\" (UniqueName: \"kubernetes.io/projected/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-kube-api-access-tntjt\") pod \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.339079 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-utilities\") pod \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.339188 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-catalog-content\") pod \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\" (UID: \"7757e2f9-9deb-49ad-aa12-eaca4f054ffc\") " Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.339833 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-utilities" (OuterVolumeSpecName: "utilities") pod "7757e2f9-9deb-49ad-aa12-eaca4f054ffc" (UID: "7757e2f9-9deb-49ad-aa12-eaca4f054ffc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.344164 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-kube-api-access-tntjt" (OuterVolumeSpecName: "kube-api-access-tntjt") pod "7757e2f9-9deb-49ad-aa12-eaca4f054ffc" (UID: "7757e2f9-9deb-49ad-aa12-eaca4f054ffc"). InnerVolumeSpecName "kube-api-access-tntjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.362783 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7757e2f9-9deb-49ad-aa12-eaca4f054ffc" (UID: "7757e2f9-9deb-49ad-aa12-eaca4f054ffc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.443137 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tntjt\" (UniqueName: \"kubernetes.io/projected/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-kube-api-access-tntjt\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.443165 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.443175 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7757e2f9-9deb-49ad-aa12-eaca4f054ffc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.682858 4605 generic.go:334] "Generic (PLEG): container finished" podID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerID="7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd" exitCode=0 Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.683131 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerDied","Data":"7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd"} Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.683232 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vplqz" event={"ID":"7757e2f9-9deb-49ad-aa12-eaca4f054ffc","Type":"ContainerDied","Data":"a47f1a5e93f2d444e7b73ca1c8af97eed0a4a24dc49e7798fa61e06fe236ab46"} Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.683247 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vplqz" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.683269 4605 scope.go:117] "RemoveContainer" containerID="7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.730527 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vplqz"] Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.736519 4605 scope.go:117] "RemoveContainer" containerID="d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.741757 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vplqz"] Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.792111 4605 scope.go:117] "RemoveContainer" containerID="739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.840869 4605 scope.go:117] "RemoveContainer" containerID="7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd" Oct 01 14:50:11 crc kubenswrapper[4605]: E1001 14:50:11.841299 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd\": container with ID starting with 7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd not found: ID does not exist" containerID="7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.841340 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd"} err="failed to get container status \"7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd\": rpc error: code = NotFound desc = could not find container \"7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd\": container with ID starting with 7a3dd6f55c2fc8d1502416e27a1f72815e0da746f5384d7c1c5beb0ebc94c2bd not found: ID does not exist" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.841370 4605 scope.go:117] "RemoveContainer" containerID="d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102" Oct 01 14:50:11 crc kubenswrapper[4605]: E1001 14:50:11.841721 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102\": container with ID starting with d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102 not found: ID does not exist" containerID="d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.841741 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102"} err="failed to get container status \"d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102\": rpc error: code = NotFound desc = could not find container \"d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102\": container with ID starting with d4d5b840ab56920ca962f25776211ca89cbac61f47052c13565373ffc397d102 not found: ID does not exist" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.841758 4605 scope.go:117] "RemoveContainer" 
containerID="739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f" Oct 01 14:50:11 crc kubenswrapper[4605]: E1001 14:50:11.842047 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f\": container with ID starting with 739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f not found: ID does not exist" containerID="739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.842085 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f"} err="failed to get container status \"739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f\": rpc error: code = NotFound desc = could not find container \"739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f\": container with ID starting with 739f4809bd4e85584bc8c4e64b5598132bdb437c516ca26265e1df9635165f1f not found: ID does not exist" Oct 01 14:50:11 crc kubenswrapper[4605]: I1001 14:50:11.936564 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" path="/var/lib/kubelet/pods/7757e2f9-9deb-49ad-aa12-eaca4f054ffc/volumes" Oct 01 14:50:14 crc kubenswrapper[4605]: I1001 14:50:14.718390 4605 generic.go:334] "Generic (PLEG): container finished" podID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerID="5e1ca03101d6585c2176a2d1218da4fa4ba12d06d2e52650199ebf3c3fdeecc3" exitCode=0 Oct 01 14:50:14 crc kubenswrapper[4605]: I1001 14:50:14.718468 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" event={"ID":"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86","Type":"ContainerDied","Data":"5e1ca03101d6585c2176a2d1218da4fa4ba12d06d2e52650199ebf3c3fdeecc3"} Oct 01 14:50:14 crc kubenswrapper[4605]: I1001 14:50:14.719773 4605 scope.go:117] "RemoveContainer" containerID="5e1ca03101d6585c2176a2d1218da4fa4ba12d06d2e52650199ebf3c3fdeecc3" Oct 01 14:50:15 crc kubenswrapper[4605]: I1001 14:50:15.526228 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4kxmw_must-gather-8j6bn_a2da0130-d7f9-4ede-a9d1-ce8a70e98c86/gather/0.log" Oct 01 14:50:22 crc kubenswrapper[4605]: I1001 14:50:22.927615 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:50:22 crc kubenswrapper[4605]: E1001 14:50:22.928281 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.093415 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-4kxmw/must-gather-8j6bn"] Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.094250 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="copy" containerID="cri-o://3adee77015422ceaf3c1ef4878f473915d1fc4afc2e51bac3c3fd96c8be29e51" 
gracePeriod=2 Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.103062 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-4kxmw/must-gather-8j6bn"] Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.877705 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4kxmw_must-gather-8j6bn_a2da0130-d7f9-4ede-a9d1-ce8a70e98c86/copy/0.log" Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.878989 4605 generic.go:334] "Generic (PLEG): container finished" podID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerID="3adee77015422ceaf3c1ef4878f473915d1fc4afc2e51bac3c3fd96c8be29e51" exitCode=143 Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.879059 4605 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5576199bfae388c5ecb05aeb4dd43ad6d757883238b3bde5513d3116b818e71f" Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.881492 4605 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-4kxmw_must-gather-8j6bn_a2da0130-d7f9-4ede-a9d1-ce8a70e98c86/copy/0.log" Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.881870 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.978187 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8dmm\" (UniqueName: \"kubernetes.io/projected/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-kube-api-access-h8dmm\") pod \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " Oct 01 14:50:28 crc kubenswrapper[4605]: I1001 14:50:28.978268 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-must-gather-output\") pod \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\" (UID: \"a2da0130-d7f9-4ede-a9d1-ce8a70e98c86\") " Oct 01 14:50:29 crc kubenswrapper[4605]: I1001 14:50:29.007349 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-kube-api-access-h8dmm" (OuterVolumeSpecName: "kube-api-access-h8dmm") pod "a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" (UID: "a2da0130-d7f9-4ede-a9d1-ce8a70e98c86"). InnerVolumeSpecName "kube-api-access-h8dmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:50:29 crc kubenswrapper[4605]: I1001 14:50:29.080620 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8dmm\" (UniqueName: \"kubernetes.io/projected/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-kube-api-access-h8dmm\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:29 crc kubenswrapper[4605]: I1001 14:50:29.172526 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" (UID: "a2da0130-d7f9-4ede-a9d1-ce8a70e98c86"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:50:29 crc kubenswrapper[4605]: I1001 14:50:29.181862 4605 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 01 14:50:29 crc kubenswrapper[4605]: I1001 14:50:29.891859 4605 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-4kxmw/must-gather-8j6bn" Oct 01 14:50:29 crc kubenswrapper[4605]: I1001 14:50:29.941057 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" path="/var/lib/kubelet/pods/a2da0130-d7f9-4ede-a9d1-ce8a70e98c86/volumes" Oct 01 14:50:35 crc kubenswrapper[4605]: I1001 14:50:35.926887 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:50:35 crc kubenswrapper[4605]: E1001 14:50:35.927745 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:50:47 crc kubenswrapper[4605]: I1001 14:50:47.945591 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:50:47 crc kubenswrapper[4605]: E1001 14:50:47.946539 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:50:52 crc kubenswrapper[4605]: I1001 14:50:52.710108 4605 scope.go:117] "RemoveContainer" containerID="76509d03b2d263f167a6fc1de9496c47e42980e978a0e31caf617f644c1f8384" Oct 01 14:50:52 crc kubenswrapper[4605]: I1001 14:50:52.735140 4605 scope.go:117] "RemoveContainer" containerID="9d000d5a035327e21fb0cccf8de1f6a68d6f4c9cffd6d8d72bf0d43f0c92a1ec" Oct 01 14:50:52 crc kubenswrapper[4605]: I1001 14:50:52.789143 4605 scope.go:117] "RemoveContainer" containerID="ccbd25a81b423e652a742023578f0f25a058374af9af64600a62c2459880cc18" Oct 01 14:50:52 crc kubenswrapper[4605]: I1001 14:50:52.810724 4605 scope.go:117] "RemoveContainer" containerID="3adee77015422ceaf3c1ef4878f473915d1fc4afc2e51bac3c3fd96c8be29e51" Oct 01 14:50:52 crc kubenswrapper[4605]: I1001 14:50:52.851612 4605 scope.go:117] "RemoveContainer" containerID="6806f36a5b3177f38c3f59ee8e5cc49058e097b4dfa4e40eca6ffc97238e9068" Oct 01 14:50:52 crc kubenswrapper[4605]: I1001 14:50:52.878555 4605 scope.go:117] "RemoveContainer" containerID="5e1ca03101d6585c2176a2d1218da4fa4ba12d06d2e52650199ebf3c3fdeecc3" Oct 01 14:51:02 crc kubenswrapper[4605]: I1001 14:51:02.926423 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:51:02 crc kubenswrapper[4605]: E1001 14:51:02.927144 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:51:13 crc kubenswrapper[4605]: I1001 14:51:13.929841 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:51:13 crc kubenswrapper[4605]: E1001 14:51:13.930762 4605 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdjh7_openshift-machine-config-operator(f3023060-c8ae-492b-b1cb-a418d9a8e59f)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" podUID="f3023060-c8ae-492b-b1cb-a418d9a8e59f" Oct 01 14:51:24 crc kubenswrapper[4605]: I1001 14:51:24.928661 4605 scope.go:117] "RemoveContainer" containerID="47fcc9d09b30d5b1909f661246ae31a622aeee683f33f29650e6dae968163ac2" Oct 01 14:51:25 crc kubenswrapper[4605]: I1001 14:51:25.425717 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdjh7" event={"ID":"f3023060-c8ae-492b-b1cb-a418d9a8e59f","Type":"ContainerStarted","Data":"d18016f45947027706c96a7b7c0ee8d271451738978487727511913253630fe5"} Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.020944 4605 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hb5b7"] Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.021898 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="gather" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.021914 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="gather" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.021927 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="extract-content" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.021933 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="extract-content" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.021961 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="extract-content" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.021967 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="extract-content" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.021976 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="registry-server" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.021982 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="registry-server" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.021994 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="extract-utilities" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022000 4605 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="extract-utilities" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.022013 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="extract-utilities" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022019 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="extract-utilities" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.022038 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="registry-server" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022044 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="registry-server" Oct 01 14:51:29 crc kubenswrapper[4605]: E1001 14:51:29.022055 4605 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="copy" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022063 4605 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="copy" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022271 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="7757e2f9-9deb-49ad-aa12-eaca4f054ffc" containerName="registry-server" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022300 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="copy" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022318 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2da0130-d7f9-4ede-a9d1-ce8a70e98c86" containerName="gather" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.022333 4605 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e89b05e-af2c-41fc-8abc-1ff77175b1a4" containerName="registry-server" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.024233 4605 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.042215 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hb5b7"] Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.204956 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhcpd\" (UniqueName: \"kubernetes.io/projected/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-kube-api-access-bhcpd\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.205280 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-utilities\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.205382 4605 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-catalog-content\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.307317 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-utilities\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.307661 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-catalog-content\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.307853 4605 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhcpd\" (UniqueName: \"kubernetes.io/projected/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-kube-api-access-bhcpd\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.307945 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-utilities\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.309026 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-catalog-content\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.329966 4605 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bhcpd\" (UniqueName: \"kubernetes.io/projected/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-kube-api-access-bhcpd\") pod \"redhat-operators-hb5b7\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.402566 4605 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:29 crc kubenswrapper[4605]: I1001 14:51:29.896188 4605 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hb5b7"] Oct 01 14:51:29 crc kubenswrapper[4605]: W1001 14:51:29.919294 4605 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2cd2bee9_7c5d_4119_a9d9_d5361d0c5a30.slice/crio-8b104f741e83673b272f11dcd0b33923ccec5c1fc14dd143ebf9ac4d6ad04109 WatchSource:0}: Error finding container 8b104f741e83673b272f11dcd0b33923ccec5c1fc14dd143ebf9ac4d6ad04109: Status 404 returned error can't find the container with id 8b104f741e83673b272f11dcd0b33923ccec5c1fc14dd143ebf9ac4d6ad04109 Oct 01 14:51:30 crc kubenswrapper[4605]: I1001 14:51:30.490896 4605 generic.go:334] "Generic (PLEG): container finished" podID="2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" containerID="4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a" exitCode=0 Oct 01 14:51:30 crc kubenswrapper[4605]: I1001 14:51:30.491344 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerDied","Data":"4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a"} Oct 01 14:51:30 crc kubenswrapper[4605]: I1001 14:51:30.491370 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerStarted","Data":"8b104f741e83673b272f11dcd0b33923ccec5c1fc14dd143ebf9ac4d6ad04109"} Oct 01 14:51:31 crc kubenswrapper[4605]: I1001 14:51:31.502263 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerStarted","Data":"72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c"} Oct 01 14:51:34 crc kubenswrapper[4605]: I1001 14:51:34.535299 4605 generic.go:334] "Generic (PLEG): container finished" podID="2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" containerID="72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c" exitCode=0 Oct 01 14:51:34 crc kubenswrapper[4605]: I1001 14:51:34.535356 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerDied","Data":"72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c"} Oct 01 14:51:36 crc kubenswrapper[4605]: I1001 14:51:36.559484 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerStarted","Data":"a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6"} Oct 01 14:51:36 crc kubenswrapper[4605]: I1001 14:51:36.583729 4605 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hb5b7" podStartSLOduration=3.757817601 podStartE2EDuration="8.583707388s" 
podCreationTimestamp="2025-10-01 14:51:28 +0000 UTC" firstStartedPulling="2025-10-01 14:51:30.49342788 +0000 UTC m=+4013.237404088" lastFinishedPulling="2025-10-01 14:51:35.319317667 +0000 UTC m=+4018.063293875" observedRunningTime="2025-10-01 14:51:36.576539037 +0000 UTC m=+4019.320515285" watchObservedRunningTime="2025-10-01 14:51:36.583707388 +0000 UTC m=+4019.327683596" Oct 01 14:51:39 crc kubenswrapper[4605]: I1001 14:51:39.404344 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:39 crc kubenswrapper[4605]: I1001 14:51:39.406334 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:40 crc kubenswrapper[4605]: I1001 14:51:40.506953 4605 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hb5b7" podUID="2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" containerName="registry-server" probeResult="failure" output=< Oct 01 14:51:40 crc kubenswrapper[4605]: timeout: failed to connect service ":50051" within 1s Oct 01 14:51:40 crc kubenswrapper[4605]: > Oct 01 14:51:49 crc kubenswrapper[4605]: I1001 14:51:49.468131 4605 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:49 crc kubenswrapper[4605]: I1001 14:51:49.525203 4605 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:49 crc kubenswrapper[4605]: I1001 14:51:49.707917 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hb5b7"] Oct 01 14:51:50 crc kubenswrapper[4605]: I1001 14:51:50.713805 4605 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hb5b7" podUID="2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" containerName="registry-server" containerID="cri-o://a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6" gracePeriod=2 Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.219071 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.390775 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhcpd\" (UniqueName: \"kubernetes.io/projected/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-kube-api-access-bhcpd\") pod \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.390959 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-utilities\") pod \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.391062 4605 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-catalog-content\") pod \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\" (UID: \"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30\") " Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.391910 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-utilities" (OuterVolumeSpecName: "utilities") pod "2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" (UID: "2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.394500 4605 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.396176 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-kube-api-access-bhcpd" (OuterVolumeSpecName: "kube-api-access-bhcpd") pod "2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" (UID: "2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30"). InnerVolumeSpecName "kube-api-access-bhcpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.476352 4605 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" (UID: "2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.496342 4605 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.496382 4605 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhcpd\" (UniqueName: \"kubernetes.io/projected/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30-kube-api-access-bhcpd\") on node \"crc\" DevicePath \"\"" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.723953 4605 generic.go:334] "Generic (PLEG): container finished" podID="2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" containerID="a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6" exitCode=0 Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.724003 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerDied","Data":"a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6"} Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.724035 4605 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hb5b7" event={"ID":"2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30","Type":"ContainerDied","Data":"8b104f741e83673b272f11dcd0b33923ccec5c1fc14dd143ebf9ac4d6ad04109"} Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.724067 4605 scope.go:117] "RemoveContainer" containerID="a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.724201 4605 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hb5b7" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.749055 4605 scope.go:117] "RemoveContainer" containerID="72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.760398 4605 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hb5b7"] Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.776250 4605 scope.go:117] "RemoveContainer" containerID="4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.782766 4605 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hb5b7"] Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.834837 4605 scope.go:117] "RemoveContainer" containerID="a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6" Oct 01 14:51:51 crc kubenswrapper[4605]: E1001 14:51:51.835237 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6\": container with ID starting with a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6 not found: ID does not exist" containerID="a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.835356 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6"} err="failed to get container status \"a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6\": rpc error: code = NotFound desc = could not find container \"a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6\": container with ID starting with a2f706d781f2384e2a0176eba3e3379a056e9d1912ecd59413ab1886d55d1bd6 not found: ID does not exist" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.835501 4605 scope.go:117] "RemoveContainer" containerID="72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c" Oct 01 14:51:51 crc kubenswrapper[4605]: E1001 14:51:51.835856 4605 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c\": container with ID starting with 72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c not found: ID does not exist" containerID="72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.835881 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c"} err="failed to get container status \"72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c\": rpc error: code = NotFound desc = could not find container \"72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c\": container with ID starting with 72475ca0b5f22811e4fbfb24d8048a09f0aab16e5f0269bafa7035211e18164c not found: ID does not exist" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.835895 4605 scope.go:117] "RemoveContainer" containerID="4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a" Oct 01 14:51:51 crc kubenswrapper[4605]: E1001 14:51:51.836193 4605 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a\": container with ID starting with 4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a not found: ID does not exist" containerID="4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.836302 4605 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a"} err="failed to get container status \"4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a\": rpc error: code = NotFound desc = could not find container \"4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a\": container with ID starting with 4e0d556dcba8755fb263ad61b97cece493ebeb6892d55926c4b7a8911141cd5a not found: ID does not exist" Oct 01 14:51:51 crc kubenswrapper[4605]: I1001 14:51:51.937540 4605 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30" path="/var/lib/kubelet/pods/2cd2bee9-7c5d-4119-a9d9-d5361d0c5a30/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067240040024443 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067240041017361 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067227537016523 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067227537015473 5ustar corecore